[ "sender = os.environ.get('SENDER', 'Specified environment variable is not set.') datastore_client = DataStore() email_sender", "last_updated_saved = context['last_updated_at'] print('Last saved update date was: ' + last_updated_saved) print('Current update", "recipients = datastore_client.get_recipients() content = get_message_content(last_update) subject = 'Travel Ban Cron Job Notification'", "datastore_client = DataStore() email_sender = EmailSender(sendgrid_api_key) def get_message_content(last_update): return (\"US Gov Travel restrictions", "os.environ.get('SENDER', 'Specified environment variable is not set.') datastore_client = DataStore() email_sender = EmailSender(sendgrid_api_key)", "set.') sender = os.environ.get('SENDER', 'Specified environment variable is not set.') datastore_client = DataStore()", "content = get_message_content(last_update) subject = 'Travel Ban Cron Job Notification' email_sender.send(sender, recipients, subject,", "\"Go to \" + travel_site_url) def send_email_notification(event, context): last_update = base64.b64decode(event['data']).decode('utf-8') context =", "from datastore import DataStore from emailsender import EmailSender sendgrid_api_key = os.environ.get('SENDGRID_EMAIL_API_KEY', 'Specified environment", "EmailSender(sendgrid_api_key) def get_message_content(last_update): return (\"US Gov Travel restrictions page was recently updated (\"", "last_update + \").\\n\" \"Go to \" + travel_site_url) def send_email_notification(event, context): last_update =", "environment variable is not set.') sender = os.environ.get('SENDER', 'Specified environment variable is not", "set.') datastore_client = DataStore() email_sender = EmailSender(sendgrid_api_key) def get_message_content(last_update): return (\"US Gov Travel", "def send_email_notification(event, context): last_update = base64.b64decode(event['data']).decode('utf-8') context = datastore_client.get_context() last_updated_saved = context['last_updated_at'] print('Last", "EmailSender sendgrid_api_key = os.environ.get('SENDGRID_EMAIL_API_KEY', 'Specified environment variable is not set.') travel_site_url = os.environ.get('TRAVEL_SITE_URL',", "variable is not set.') travel_site_url = os.environ.get('TRAVEL_SITE_URL', 'Specified environment variable is not set.')", "print('Last saved update date was: ' + last_updated_saved) print('Current update date is: '", "environment variable is not set.') travel_site_url = os.environ.get('TRAVEL_SITE_URL', 'Specified environment variable is not", "to \" + travel_site_url) def send_email_notification(event, context): last_update = base64.b64decode(event['data']).decode('utf-8') context = datastore_client.get_context()", "= os.environ.get('SENDGRID_EMAIL_API_KEY', 'Specified environment variable is not set.') travel_site_url = os.environ.get('TRAVEL_SITE_URL', 'Specified environment", "not set.') sender = os.environ.get('SENDER', 'Specified environment variable is not set.') datastore_client =", "context['last_updated_at'] print('Last saved update date was: ' + last_updated_saved) print('Current update date is:", "sendgrid_api_key = os.environ.get('SENDGRID_EMAIL_API_KEY', 'Specified environment variable is not set.') travel_site_url = os.environ.get('TRAVEL_SITE_URL', 'Specified", "travel_site_url = os.environ.get('TRAVEL_SITE_URL', 'Specified environment variable is not set.') sender = os.environ.get('SENDER', 'Specified", "+ \").\\n\" \"Go to \" + travel_site_url) def send_email_notification(event, context): last_update = 
base64.b64decode(event['data']).decode('utf-8')", "' + last_updated_saved) print('Current update date is: ' + last_update) if last_update !=", "date is: ' + last_update) if last_update != last_updated_saved: print('A new update was", "notifying subscribers') datastore_client.update_context(context, last_update) recipients = datastore_client.get_recipients() content = get_message_content(last_update) subject = 'Travel", "= get_message_content(last_update) subject = 'Travel Ban Cron Job Notification' email_sender.send(sender, recipients, subject, content)", "context = datastore_client.get_context() last_updated_saved = context['last_updated_at'] print('Last saved update date was: ' +", "is not set.') travel_site_url = os.environ.get('TRAVEL_SITE_URL', 'Specified environment variable is not set.') sender", "datastore import DataStore from emailsender import EmailSender sendgrid_api_key = os.environ.get('SENDGRID_EMAIL_API_KEY', 'Specified environment variable", "environment variable is not set.') datastore_client = DataStore() email_sender = EmailSender(sendgrid_api_key) def get_message_content(last_update):", "= os.environ.get('TRAVEL_SITE_URL', 'Specified environment variable is not set.') sender = os.environ.get('SENDER', 'Specified environment", "'Specified environment variable is not set.') travel_site_url = os.environ.get('TRAVEL_SITE_URL', 'Specified environment variable is", "update date is: ' + last_update) if last_update != last_updated_saved: print('A new update", "os from datastore import DataStore from emailsender import EmailSender sendgrid_api_key = os.environ.get('SENDGRID_EMAIL_API_KEY', 'Specified", "datastore_client.update_context(context, last_update) recipients = datastore_client.get_recipients() content = get_message_content(last_update) subject = 'Travel Ban Cron", "was: ' + last_updated_saved) print('Current update date is: ' + last_update) if last_update", "import os from datastore import DataStore from emailsender import EmailSender sendgrid_api_key = os.environ.get('SENDGRID_EMAIL_API_KEY',", "= EmailSender(sendgrid_api_key) def get_message_content(last_update): return (\"US Gov Travel restrictions page was recently updated", "pushed. Updating database and notifying subscribers') datastore_client.update_context(context, last_update) recipients = datastore_client.get_recipients() content =", "is not set.') sender = os.environ.get('SENDER', 'Specified environment variable is not set.') datastore_client", "print('A new update was pushed. Updating database and notifying subscribers') datastore_client.update_context(context, last_update) recipients", "!= last_updated_saved: print('A new update was pushed. 
Updating database and notifying subscribers') datastore_client.update_context(context,", "context): last_update = base64.b64decode(event['data']).decode('utf-8') context = datastore_client.get_context() last_updated_saved = context['last_updated_at'] print('Last saved update", "'Specified environment variable is not set.') sender = os.environ.get('SENDER', 'Specified environment variable is", "variable is not set.') sender = os.environ.get('SENDER', 'Specified environment variable is not set.')", "get_message_content(last_update): return (\"US Gov Travel restrictions page was recently updated (\" + last_update", "= os.environ.get('SENDER', 'Specified environment variable is not set.') datastore_client = DataStore() email_sender =", "page was recently updated (\" + last_update + \").\\n\" \"Go to \" +", "+ last_update + \").\\n\" \"Go to \" + travel_site_url) def send_email_notification(event, context): last_update", "if last_update != last_updated_saved: print('A new update was pushed. Updating database and notifying", "= datastore_client.get_recipients() content = get_message_content(last_update) subject = 'Travel Ban Cron Job Notification' email_sender.send(sender,", "subscribers') datastore_client.update_context(context, last_update) recipients = datastore_client.get_recipients() content = get_message_content(last_update) subject = 'Travel Ban", "is: ' + last_update) if last_update != last_updated_saved: print('A new update was pushed.", "datastore_client.get_context() last_updated_saved = context['last_updated_at'] print('Last saved update date was: ' + last_updated_saved) print('Current", "(\" + last_update + \").\\n\" \"Go to \" + travel_site_url) def send_email_notification(event, context):", "+ last_updated_saved) print('Current update date is: ' + last_update) if last_update != last_updated_saved:", "Travel restrictions page was recently updated (\" + last_update + \").\\n\" \"Go to", "travel_site_url) def send_email_notification(event, context): last_update = base64.b64decode(event['data']).decode('utf-8') context = datastore_client.get_context() last_updated_saved = context['last_updated_at']", "not set.') datastore_client = DataStore() email_sender = EmailSender(sendgrid_api_key) def get_message_content(last_update): return (\"US Gov", "Gov Travel restrictions page was recently updated (\" + last_update + \").\\n\" \"Go", "recently updated (\" + last_update + \").\\n\" \"Go to \" + travel_site_url) def", "update was pushed. Updating database and notifying subscribers') datastore_client.update_context(context, last_update) recipients = datastore_client.get_recipients()", "def get_message_content(last_update): return (\"US Gov Travel restrictions page was recently updated (\" +", "database and notifying subscribers') datastore_client.update_context(context, last_update) recipients = datastore_client.get_recipients() content = get_message_content(last_update) subject", "last_update) if last_update != last_updated_saved: print('A new update was pushed. Updating database and", "DataStore() email_sender = EmailSender(sendgrid_api_key) def get_message_content(last_update): return (\"US Gov Travel restrictions page was", "'Specified environment variable is not set.') datastore_client = DataStore() email_sender = EmailSender(sendgrid_api_key) def", "import base64 import os from datastore import DataStore from emailsender import EmailSender sendgrid_api_key", "' + last_update) if last_update != last_updated_saved: print('A new update was pushed. 
Updating", "not set.') travel_site_url = os.environ.get('TRAVEL_SITE_URL', 'Specified environment variable is not set.') sender =", "= context['last_updated_at'] print('Last saved update date was: ' + last_updated_saved) print('Current update date", "last_updated_saved) print('Current update date is: ' + last_update) if last_update != last_updated_saved: print('A", "set.') travel_site_url = os.environ.get('TRAVEL_SITE_URL', 'Specified environment variable is not set.') sender = os.environ.get('SENDER',", "Updating database and notifying subscribers') datastore_client.update_context(context, last_update) recipients = datastore_client.get_recipients() content = get_message_content(last_update)", "os.environ.get('SENDGRID_EMAIL_API_KEY', 'Specified environment variable is not set.') travel_site_url = os.environ.get('TRAVEL_SITE_URL', 'Specified environment variable", "was recently updated (\" + last_update + \").\\n\" \"Go to \" + travel_site_url)", "was pushed. Updating database and notifying subscribers') datastore_client.update_context(context, last_update) recipients = datastore_client.get_recipients() content", "DataStore from emailsender import EmailSender sendgrid_api_key = os.environ.get('SENDGRID_EMAIL_API_KEY', 'Specified environment variable is not", "from emailsender import EmailSender sendgrid_api_key = os.environ.get('SENDGRID_EMAIL_API_KEY', 'Specified environment variable is not set.')", "os.environ.get('TRAVEL_SITE_URL', 'Specified environment variable is not set.') sender = os.environ.get('SENDER', 'Specified environment variable", "= DataStore() email_sender = EmailSender(sendgrid_api_key) def get_message_content(last_update): return (\"US Gov Travel restrictions page", "email_sender = EmailSender(sendgrid_api_key) def get_message_content(last_update): return (\"US Gov Travel restrictions page was recently", "send_email_notification(event, context): last_update = base64.b64decode(event['data']).decode('utf-8') context = datastore_client.get_context() last_updated_saved = context['last_updated_at'] print('Last saved", "variable is not set.') datastore_client = DataStore() email_sender = EmailSender(sendgrid_api_key) def get_message_content(last_update): return", "return (\"US Gov Travel restrictions page was recently updated (\" + last_update +", "update date was: ' + last_updated_saved) print('Current update date is: ' + last_update)", "last_update != last_updated_saved: print('A new update was pushed. Updating database and notifying subscribers')", "saved update date was: ' + last_updated_saved) print('Current update date is: ' +", "datastore_client.get_recipients() content = get_message_content(last_update) subject = 'Travel Ban Cron Job Notification' email_sender.send(sender, recipients,", "base64 import os from datastore import DataStore from emailsender import EmailSender sendgrid_api_key =", "+ travel_site_url) def send_email_notification(event, context): last_update = base64.b64decode(event['data']).decode('utf-8') context = datastore_client.get_context() last_updated_saved =", "emailsender import EmailSender sendgrid_api_key = os.environ.get('SENDGRID_EMAIL_API_KEY', 'Specified environment variable is not set.') travel_site_url", "\").\\n\" \"Go to \" + travel_site_url) def send_email_notification(event, context): last_update = base64.b64decode(event['data']).decode('utf-8') context", "import DataStore from emailsender import EmailSender sendgrid_api_key = os.environ.get('SENDGRID_EMAIL_API_KEY', 'Specified environment variable is", "new update was pushed. 
Updating database and notifying subscribers') datastore_client.update_context(context, last_update) recipients =", "restrictions page was recently updated (\" + last_update + \").\\n\" \"Go to \"", "updated (\" + last_update + \").\\n\" \"Go to \" + travel_site_url) def send_email_notification(event,", "is not set.') datastore_client = DataStore() email_sender = EmailSender(sendgrid_api_key) def get_message_content(last_update): return (\"US", "last_update = base64.b64decode(event['data']).decode('utf-8') context = datastore_client.get_context() last_updated_saved = context['last_updated_at'] print('Last saved update date", "(\"US Gov Travel restrictions page was recently updated (\" + last_update + \").\\n\"", "\" + travel_site_url) def send_email_notification(event, context): last_update = base64.b64decode(event['data']).decode('utf-8') context = datastore_client.get_context() last_updated_saved", "date was: ' + last_updated_saved) print('Current update date is: ' + last_update) if", "+ last_update) if last_update != last_updated_saved: print('A new update was pushed. Updating database", "last_update) recipients = datastore_client.get_recipients() content = get_message_content(last_update) subject = 'Travel Ban Cron Job", "= base64.b64decode(event['data']).decode('utf-8') context = datastore_client.get_context() last_updated_saved = context['last_updated_at'] print('Last saved update date was:", "print('Current update date is: ' + last_update) if last_update != last_updated_saved: print('A new", "import EmailSender sendgrid_api_key = os.environ.get('SENDGRID_EMAIL_API_KEY', 'Specified environment variable is not set.') travel_site_url =", "= datastore_client.get_context() last_updated_saved = context['last_updated_at'] print('Last saved update date was: ' + last_updated_saved)", "last_updated_saved: print('A new update was pushed. Updating database and notifying subscribers') datastore_client.update_context(context, last_update)", "base64.b64decode(event['data']).decode('utf-8') context = datastore_client.get_context() last_updated_saved = context['last_updated_at'] print('Last saved update date was: '", "and notifying subscribers') datastore_client.update_context(context, last_update) recipients = datastore_client.get_recipients() content = get_message_content(last_update) subject =" ]
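The (event, context) signature and the base64-decoded event['data'] match the Google Cloud Functions background-function convention (e.g. a Pub/Sub trigger), although the deployment target is not stated here. A minimal sketch of the payload shape the function expects, using only the standard library; the date string is a made-up example:

import base64

# Hypothetical payload: the upstream job publishes the page's last update
# date as the message body; the platform delivers it base64-encoded.
payload = "2020-03-19"
event = {"data": base64.b64encode(payload.encode("utf-8"))}

# Mirrors the first line of send_email_notification():
decoded = base64.b64decode(event["data"]).decode("utf-8")
assert decoded == payload
print(decoded)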
[ "tomo_path: str Simulation directory or .npy file of a reconstructed simulation approx: str", "2 or 3!\") a *= zero #tool.arr2im(a, scale=True).save(\"test.png\") return a def metric_rms(ri, ref):", "# reference riref = extract.get_tomo_ri_structure(sim_dir) ss = metric_rms(ri, riref) tv = metric_tv(ri, riref)", "\"\"\"Total variation metric (normalized) This metric was used and described in Müller et.", ".npy files!\" else: # Recompute everything ri = ex_bpg.backpropagate_fdtd_data(sim_dir, approximation=approx, autofocus=autofocus) # reference", "a directory or an .npy file!\") tv = None ss = None #", "grad = np.gradient(ri.real-ref) result = 0 for g in grad: result += np.sum(cutout(np.abs(g)))", "in lines: line = line.strip() if line.startswith(\"TV_\"+approx): try: tv = float(line.split()[1]) except: pass", "ss, tv def cutout(a): \"\"\"Cut out circle/sphere from 2D/3D square/cubic array\"\"\" x =", "'_results' appended!\" assert os.path.exists(sim_dir), msg metr_file = tomo_path[:-4]+\"_metrices.txt\" npy_file = tomo_path else: raise", "cutout(a): \"\"\"Cut out circle/sphere from 2D/3D square/cubic array\"\"\" x = np.arange(a.shape[0]) c =", "= metric_rms(ri, riref) tv = metric_tv(ri, riref) # Save result in resf files", "errors Notes ----- A second call with the same arguments will be fast,", "described in Müller et. al, \"ODTbrain: a Python library for full-view, dense diffraction", "if tv is None or ss is None: if npy_file: ri = np.load(npy_file)", "= res_dir[:-8] msg = \"Simulation directory not found! The .npy file should be", "open(metr_file, \"r\") as fd: lines = fd.readlines() for line in lines: line =", "x.reshape(-1, 1) y = x.reshape(1, -1) zero = ((x-c)**2 + (y-c)**2) < c**2", "and read parameters if os.path.exists(metr_file): with open(metr_file, \"r\") as fd: lines = fd.readlines()", "1) y = x.reshape(1, -1, 1) z = x.reshape(1, -1, 1) zero =", "x = x.reshape(-1, 1, 1) y = x.reshape(1, -1, 1) z = x.reshape(1,", "{:.15e}\\n\".format(approx, tv) lines += \"SS_{} {:.15e}\\n\".format(approx, ss) resfdata.writelines(lines) return ss, tv def cutout(a):", "= line.strip() if line.startswith(\"TV_\"+approx): try: tv = float(line.split()[1]) except: pass elif line.startswith(\"SS_\"+approx): try:", "with the same arguments will be fast, because the result is saved on", "ss) resfdata.writelines(lines) return ss, tv def cutout(a): \"\"\"Cut out circle/sphere from 2D/3D square/cubic", "directory not found! The .npy file should be in a \" +\\ \"folder", "None ss = None # Check if the results_file exists and read parameters", "ri = ex_bpg.backpropagate_fdtd_data(sim_dir, approximation=approx, autofocus=autofocus) # reference riref = extract.get_tomo_ri_structure(sim_dir) ss = metric_rms(ri,", "center of rotation in the simulation). 
This only makes sense if `tomo_path` is", "to use, one of [\"radon\", \"born\", \"rytov\"] autofocus: bool If `True`, perform autofocusing.", "as np import os import sys sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))+\"/../meep_tomo\") from meep_tomo import extract, common", "2: x = x.reshape(-1, 1) y = x.reshape(1, -1) zero = ((x-c)**2 +", "else: raise ValueError(\"simulation must be a directory or an .npy file!\") tv =", "False elif tomo_path.endswith(\".npy\"): res_dir = os.path.dirname(os.path.abspath(tomo_path)) sim_dir = res_dir[:-8] msg = \"Simulation directory", "= extract.get_tomo_ri_structure(sim_dir) ss = metric_rms(ri, riref) tv = metric_tv(ri, riref) # Save result", "os.path.exists(sim_dir), msg metr_file = tomo_path[:-4]+\"_metrices.txt\" npy_file = tomo_path else: raise ValueError(\"simulation must be", "is None or ss is None: if npy_file: ri = np.load(npy_file) assert autofocus", "x.reshape(1, -1, 1) zero = ((x-c)**2 + (y-c)**2 + (z-c)**2) < c**2 else:", "reconstruction Parameters ---------- tomo_path: str Simulation directory or .npy file of a reconstructed", "in [\"radon\", \"born\", \"rytov\"] tomo_path = os.path.abspath(tomo_path) if os.path.isdir(tomo_path): sim_dir = os.path.abspath(tomo_path) res_dir", "the result is saved on disk. See Also -------- metric_rms, metric_tv: The used", "common.mkdir_p(res_dir) metr_file = os.path.join(res_dir, \"metrices.txt\") npy_file = False elif tomo_path.endswith(\".npy\"): res_dir = os.path.dirname(os.path.abspath(tomo_path))", "an .npy file!\") tv = None ss = None # Check if the", "\"Simulation directory not found! The .npy file should be in a \" +\\", "is saved on disk. See Also -------- metric_rms, metric_tv: The used metrics \"\"\"", "A second call with the same arguments will be fast, because the result", "1) z = x.reshape(1, -1, 1) zero = ((x-c)**2 + (y-c)**2 + (z-c)**2)", "reconstructed simulation approx: str Approximation to use, one of [\"radon\", \"born\", \"rytov\"] autofocus:", "c**2 elif len(a.shape) == 3: x = x.reshape(-1, 1, 1) y = x.reshape(1,", "for full-view, dense diffraction tomography\" Bioinformatics 2015 \"\"\" grad = np.gradient(ri.real-ref) result =", "+= \"TV_{} {:.15e}\\n\".format(approx, tv) lines += \"SS_{} {:.15e}\\n\".format(approx, ss) resfdata.writelines(lines) return ss, tv", "tv = metric_tv(ri, riref) # Save result in resf files with open(metr_file, \"a\")", "lines: line = line.strip() if line.startswith(\"TV_\"+approx): try: tv = float(line.split()[1]) except: pass elif", "metrics \"\"\" assert approx in [\"radon\", \"born\", \"rytov\"] tomo_path = os.path.abspath(tomo_path) if os.path.isdir(tomo_path):", "This metric was used and described in Müller et. al, \"ODTbrain: a Python", "norm = np.sum(cutout(ref.real-1)**2) return np.sqrt(rms/norm) def metric_tv(ri, ref): \"\"\"Total variation metric (normalized) This", "autofocusing. If `False` uses the exact focusing (the center of rotation in the", "npy_file = False elif tomo_path.endswith(\".npy\"): res_dir = os.path.dirname(os.path.abspath(tomo_path)) sim_dir = res_dir[:-8] msg =", "= 0 for g in grad: result += np.sum(cutout(np.abs(g))) tv = result /", "tv is None or ss is None: if npy_file: ri = np.load(npy_file) assert", "was used and described in Müller et. 
al, \"ODTbrain: a Python library for", "msg metr_file = tomo_path[:-4]+\"_metrices.txt\" npy_file = tomo_path else: raise ValueError(\"simulation must be a", "reference riref = extract.get_tomo_ri_structure(sim_dir) ss = metric_rms(ri, riref) tv = metric_tv(ri, riref) #", "tomo_path = os.path.abspath(tomo_path) if os.path.isdir(tomo_path): sim_dir = os.path.abspath(tomo_path) res_dir = os.path.abspath(tomo_path)+\"_results\" common.mkdir_p(res_dir) metr_file", "Also -------- metric_rms, metric_tv: The used metrics \"\"\" assert approx in [\"radon\", \"born\",", "of [\"radon\", \"born\", \"rytov\"] autofocus: bool If `True`, perform autofocusing. If `False` uses", "on disk. See Also -------- metric_rms, metric_tv: The used metrics \"\"\" assert approx", "\"rytov\"] tomo_path = os.path.abspath(tomo_path) if os.path.isdir(tomo_path): sim_dir = os.path.abspath(tomo_path) res_dir = os.path.abspath(tomo_path)+\"_results\" common.mkdir_p(res_dir)", "\"born\", \"rytov\"] tomo_path = os.path.abspath(tomo_path) if os.path.isdir(tomo_path): sim_dir = os.path.abspath(tomo_path) res_dir = os.path.abspath(tomo_path)+\"_results\"", "second call with the same arguments will be fast, because the result is", "\" +\\ \"folder named after the simulation with '_results' appended!\" assert os.path.exists(sim_dir), msg", "= \"Simulation directory not found! The .npy file should be in a \"", "use, one of [\"radon\", \"born\", \"rytov\"] autofocus: bool If `True`, perform autofocusing. If", "if os.path.isdir(tomo_path): sim_dir = os.path.abspath(tomo_path) res_dir = os.path.abspath(tomo_path)+\"_results\" common.mkdir_p(res_dir) metr_file = os.path.join(res_dir, \"metrices.txt\")", "= a.shape[0] / 2 if len(a.shape) == 2: x = x.reshape(-1, 1) y", "dense diffraction tomography\" Bioinformatics 2015 \"\"\" rms = np.sum(cutout(ri.real-ref.real)**2) norm = np.sum(cutout(ref.real-1)**2) return", "return a def metric_rms(ri, ref): \"\"\"Root mean square metric (normalized) This metric was", "= np.gradient(ri.real-ref) result = 0 for g in grad: result += np.sum(cutout(np.abs(g))) tv", "everything ri = ex_bpg.backpropagate_fdtd_data(sim_dir, approximation=approx, autofocus=autofocus) # reference riref = extract.get_tomo_ri_structure(sim_dir) ss =", "= os.path.dirname(os.path.abspath(tomo_path)) sim_dir = res_dir[:-8] msg = \"Simulation directory not found! The .npy", "The .npy file should be in a \" +\\ \"folder named after the", "= x.reshape(1, -1, 1) z = x.reshape(1, -1, 1) zero = ((x-c)**2 +", "= np.sum(cutout(ref.real-1)**2) return np.sqrt(rms/norm) def metric_tv(ri, ref): \"\"\"Total variation metric (normalized) This metric", "in resf files with open(metr_file, \"a\") as resfdata: lines = \"# metrices of", "= np.arange(a.shape[0]) c = a.shape[0] / 2 if len(a.shape) == 2: x =", "meep_tomo import extract, common import ex_bpg def compute_metrices(tomo_path, approx, autofocus=False): \"\"\"Compute RMS and", "len(a.shape) == 2: x = x.reshape(-1, 1) y = x.reshape(1, -1) zero =", "result = 0 for g in grad: result += np.sum(cutout(np.abs(g))) tv = result", "\"\"\"Cut out circle/sphere from 2D/3D square/cubic array\"\"\" x = np.arange(a.shape[0]) c = a.shape[0]", "or ss is None: if npy_file: ri = np.load(npy_file) assert autofocus == False,", "as fd: lines = fd.readlines() for line in lines: line = line.strip() if", "of rotation in the simulation). 
This only makes sense if `tomo_path` is not", "from meep_tomo import extract, common import ex_bpg def compute_metrices(tomo_path, approx, autofocus=False): \"\"\"Compute RMS", "files with open(metr_file, \"a\") as resfdata: lines = \"# metrices of ri-riref\\n\" lines", "parameters if os.path.exists(metr_file): with open(metr_file, \"r\") as fd: lines = fd.readlines() for line", "simulation with '_results' appended!\" assert os.path.exists(sim_dir), msg metr_file = tomo_path[:-4]+\"_metrices.txt\" npy_file = tomo_path", "not found! The .npy file should be in a \" +\\ \"folder named", "------- rms, tv: floats root-mean-square and total variation errors Notes ----- A second", "for a MEEP-simulated ODT reconstruction Parameters ---------- tomo_path: str Simulation directory or .npy", "ODT reconstruction Parameters ---------- tomo_path: str Simulation directory or .npy file of a", "res_dir[:-8] msg = \"Simulation directory not found! The .npy file should be in", "be fast, because the result is saved on disk. See Also -------- metric_rms,", "os.path.abspath(tomo_path)+\"_results\" common.mkdir_p(res_dir) metr_file = os.path.join(res_dir, \"metrices.txt\") npy_file = False elif tomo_path.endswith(\".npy\"): res_dir =", "except: pass elif line.startswith(\"SS_\"+approx): try: ss = float(line.split()[1]) except: pass if tv is", "tv: floats root-mean-square and total variation errors Notes ----- A second call with", "arguments will be fast, because the result is saved on disk. See Also", "os.path.abspath(tomo_path) if os.path.isdir(tomo_path): sim_dir = os.path.abspath(tomo_path) res_dir = os.path.abspath(tomo_path)+\"_results\" common.mkdir_p(res_dir) metr_file = os.path.join(res_dir,", "---------- tomo_path: str Simulation directory or .npy file of a reconstructed simulation approx:", "np import os import sys sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))+\"/../meep_tomo\") from meep_tomo import extract, common import", "Parameters ---------- tomo_path: str Simulation directory or .npy file of a reconstructed simulation", "used and described in Müller et. al, \"ODTbrain: a Python library for full-view,", "full-view, dense diffraction tomography\" Bioinformatics 2015 \"\"\" rms = np.sum(cutout(ri.real-ref.real)**2) norm = np.sum(cutout(ref.real-1)**2)", "(y-c)**2 + (z-c)**2) < c**2 else: raise ValueError(\"Cutout array must have dimension 2", "Python library for full-view, dense diffraction tomography\" Bioinformatics 2015 \"\"\" grad = np.gradient(ri.real-ref)", "os.path.dirname(os.path.abspath(tomo_path)) sim_dir = res_dir[:-8] msg = \"Simulation directory not found! The .npy file", "< c**2 else: raise ValueError(\"Cutout array must have dimension 2 or 3!\") a", "metric_rms(ri, riref) tv = metric_tv(ri, riref) # Save result in resf files with", "if `tomo_path` is not an .npy file. Returns ------- rms, tv: floats root-mean-square", "sys sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))+\"/../meep_tomo\") from meep_tomo import extract, common import ex_bpg def compute_metrices(tomo_path, approx,", "file. 
Returns ------- rms, tv: floats root-mean-square and total variation errors Notes -----", "fd: lines = fd.readlines() for line in lines: line = line.strip() if line.startswith(\"TV_\"+approx):", "None # Check if the results_file exists and read parameters if os.path.exists(metr_file): with", "a \" +\\ \"folder named after the simulation with '_results' appended!\" assert os.path.exists(sim_dir),", "Returns ------- rms, tv: floats root-mean-square and total variation errors Notes ----- A", "al, \"ODTbrain: a Python library for full-view, dense diffraction tomography\" Bioinformatics 2015 \"\"\"", "sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))+\"/../meep_tomo\") from meep_tomo import extract, common import ex_bpg def compute_metrices(tomo_path, approx, autofocus=False):", "for g in grad: result += np.sum(cutout(np.abs(g))) tv = result / len(grad) norm", "== 2: x = x.reshape(-1, 1) y = x.reshape(1, -1) zero = ((x-c)**2", "npy_file: ri = np.load(npy_file) assert autofocus == False, \"`autofocus` has no effect for", "Bioinformatics 2015 \"\"\" rms = np.sum(cutout(ri.real-ref.real)**2) norm = np.sum(cutout(ref.real-1)**2) return np.sqrt(rms/norm) def metric_tv(ri,", "metric_rms(ri, ref): \"\"\"Root mean square metric (normalized) This metric was used and described", "extract, common import ex_bpg def compute_metrices(tomo_path, approx, autofocus=False): \"\"\"Compute RMS and TV metrices", "must be a directory or an .npy file!\") tv = None ss =", "tomo_path.endswith(\".npy\"): res_dir = os.path.dirname(os.path.abspath(tomo_path)) sim_dir = res_dir[:-8] msg = \"Simulation directory not found!", "g in grad: result += np.sum(cutout(np.abs(g))) tv = result / len(grad) norm =", "total variation errors Notes ----- A second call with the same arguments will", "result += np.sum(cutout(np.abs(g))) tv = result / len(grad) norm = np.sum(cutout(ref.real-1)**2) return np.sqrt(tv/norm)", "= fd.readlines() for line in lines: line = line.strip() if line.startswith(\"TV_\"+approx): try: tv", "appended!\" assert os.path.exists(sim_dir), msg metr_file = tomo_path[:-4]+\"_metrices.txt\" npy_file = tomo_path else: raise ValueError(\"simulation", "np.sum(cutout(ri.real-ref.real)**2) norm = np.sum(cutout(ref.real-1)**2) return np.sqrt(rms/norm) def metric_tv(ri, ref): \"\"\"Total variation metric (normalized)", "+\\ \"folder named after the simulation with '_results' appended!\" assert os.path.exists(sim_dir), msg metr_file", "[\"radon\", \"born\", \"rytov\"] tomo_path = os.path.abspath(tomo_path) if os.path.isdir(tomo_path): sim_dir = os.path.abspath(tomo_path) res_dir =", "def metric_rms(ri, ref): \"\"\"Root mean square metric (normalized) This metric was used and", "[\"radon\", \"born\", \"rytov\"] autofocus: bool If `True`, perform autofocusing. 
If `False` uses the", "rms, tv: floats root-mean-square and total variation errors Notes ----- A second call", "ss = None # Check if the results_file exists and read parameters if", "= os.path.abspath(tomo_path) if os.path.isdir(tomo_path): sim_dir = os.path.abspath(tomo_path) res_dir = os.path.abspath(tomo_path)+\"_results\" common.mkdir_p(res_dir) metr_file =", "result in resf files with open(metr_file, \"a\") as resfdata: lines = \"# metrices", "resfdata: lines = \"# metrices of ri-riref\\n\" lines += \"TV_{} {:.15e}\\n\".format(approx, tv) lines", "# Recompute everything ri = ex_bpg.backpropagate_fdtd_data(sim_dir, approximation=approx, autofocus=autofocus) # reference riref = extract.get_tomo_ri_structure(sim_dir)", "has no effect for .npy files!\" else: # Recompute everything ri = ex_bpg.backpropagate_fdtd_data(sim_dir,", "elif len(a.shape) == 3: x = x.reshape(-1, 1, 1) y = x.reshape(1, -1,", ".npy file. Returns ------- rms, tv: floats root-mean-square and total variation errors Notes", "(the center of rotation in the simulation). This only makes sense if `tomo_path`", "ValueError(\"Cutout array must have dimension 2 or 3!\") a *= zero #tool.arr2im(a, scale=True).save(\"test.png\")", ".npy file should be in a \" +\\ \"folder named after the simulation", "os.path.exists(metr_file): with open(metr_file, \"r\") as fd: lines = fd.readlines() for line in lines:", "riref = extract.get_tomo_ri_structure(sim_dir) ss = metric_rms(ri, riref) tv = metric_tv(ri, riref) # Save", "Bioinformatics 2015 \"\"\" grad = np.gradient(ri.real-ref) result = 0 for g in grad:", "a def metric_rms(ri, ref): \"\"\"Root mean square metric (normalized) This metric was used", "the same arguments will be fast, because the result is saved on disk.", "rotation in the simulation). This only makes sense if `tomo_path` is not an", "tomo_path else: raise ValueError(\"simulation must be a directory or an .npy file!\") tv", "= None # Check if the results_file exists and read parameters if os.path.exists(metr_file):", "be in a \" +\\ \"folder named after the simulation with '_results' appended!\"", "Müller et. 
al, \"ODTbrain: a Python library for full-view, dense diffraction tomography\" Bioinformatics", "dimension 2 or 3!\") a *= zero #tool.arr2im(a, scale=True).save(\"test.png\") return a def metric_rms(ri,", "= x.reshape(1, -1, 1) zero = ((x-c)**2 + (y-c)**2 + (z-c)**2) < c**2", "sim_dir = os.path.abspath(tomo_path) res_dir = os.path.abspath(tomo_path)+\"_results\" common.mkdir_p(res_dir) metr_file = os.path.join(res_dir, \"metrices.txt\") npy_file =", "file of a reconstructed simulation approx: str Approximation to use, one of [\"radon\",", "(y-c)**2) < c**2 elif len(a.shape) == 3: x = x.reshape(-1, 1, 1) y", "have dimension 2 or 3!\") a *= zero #tool.arr2im(a, scale=True).save(\"test.png\") return a def", "results_file exists and read parameters if os.path.exists(metr_file): with open(metr_file, \"r\") as fd: lines", "in grad: result += np.sum(cutout(np.abs(g))) tv = result / len(grad) norm = np.sum(cutout(ref.real-1)**2)", "x.reshape(1, -1) zero = ((x-c)**2 + (y-c)**2) < c**2 elif len(a.shape) == 3:", "-1, 1) zero = ((x-c)**2 + (y-c)**2 + (z-c)**2) < c**2 else: raise", "diffraction tomography\" Bioinformatics 2015 \"\"\" rms = np.sum(cutout(ri.real-ref.real)**2) norm = np.sum(cutout(ref.real-1)**2) return np.sqrt(rms/norm)", "file should be in a \" +\\ \"folder named after the simulation with", "approximation=approx, autofocus=autofocus) # reference riref = extract.get_tomo_ri_structure(sim_dir) ss = metric_rms(ri, riref) tv =", "2015 \"\"\" rms = np.sum(cutout(ri.real-ref.real)**2) norm = np.sum(cutout(ref.real-1)**2) return np.sqrt(rms/norm) def metric_tv(ri, ref):", "focusing (the center of rotation in the simulation). This only makes sense if", "\"r\") as fd: lines = fd.readlines() for line in lines: line = line.strip()", "will be fast, because the result is saved on disk. See Also --------", "approx: str Approximation to use, one of [\"radon\", \"born\", \"rytov\"] autofocus: bool If", "the examples \"\"\" import numpy as np import os import sys sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))+\"/../meep_tomo\")", "tomo_path[:-4]+\"_metrices.txt\" npy_file = tomo_path else: raise ValueError(\"simulation must be a directory or an", "If `True`, perform autofocusing. 
If `False` uses the exact focusing (the center of", "line.strip() if line.startswith(\"TV_\"+approx): try: tv = float(line.split()[1]) except: pass elif line.startswith(\"SS_\"+approx): try: ss", "import os import sys sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))+\"/../meep_tomo\") from meep_tomo import extract, common import ex_bpg", "scale=True).save(\"test.png\") return a def metric_rms(ri, ref): \"\"\"Root mean square metric (normalized) This metric", "call with the same arguments will be fast, because the result is saved", "+ (y-c)**2) < c**2 elif len(a.shape) == 3: x = x.reshape(-1, 1, 1)", "library for full-view, dense diffraction tomography\" Bioinformatics 2015 \"\"\" grad = np.gradient(ri.real-ref) result", "2015 \"\"\" grad = np.gradient(ri.real-ref) result = 0 for g in grad: result", "lines = fd.readlines() for line in lines: line = line.strip() if line.startswith(\"TV_\"+approx): try:", "zero = ((x-c)**2 + (y-c)**2 + (z-c)**2) < c**2 else: raise ValueError(\"Cutout array", "z = x.reshape(1, -1, 1) zero = ((x-c)**2 + (y-c)**2 + (z-c)**2) <", "import numpy as np import os import sys sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))+\"/../meep_tomo\") from meep_tomo import", "= float(line.split()[1]) except: pass elif line.startswith(\"SS_\"+approx): try: ss = float(line.split()[1]) except: pass if", "rms = np.sum(cutout(ri.real-ref.real)**2) norm = np.sum(cutout(ref.real-1)**2) return np.sqrt(rms/norm) def metric_tv(ri, ref): \"\"\"Total variation", "be a directory or an .npy file!\") tv = None ss = None", "metric was used and described in Müller et. al, \"ODTbrain: a Python library", "a Python library for full-view, dense diffraction tomography\" Bioinformatics 2015 \"\"\" grad =", "effect for .npy files!\" else: # Recompute everything ri = ex_bpg.backpropagate_fdtd_data(sim_dir, approximation=approx, autofocus=autofocus)", "uses the exact focusing (the center of rotation in the simulation). This only", "read parameters if os.path.exists(metr_file): with open(metr_file, \"r\") as fd: lines = fd.readlines() for", "= x.reshape(-1, 1, 1) y = x.reshape(1, -1, 1) z = x.reshape(1, -1,", "is not an .npy file. Returns ------- rms, tv: floats root-mean-square and total", "and total variation errors Notes ----- A second call with the same arguments", "\"\"\" assert approx in [\"radon\", \"born\", \"rytov\"] tomo_path = os.path.abspath(tomo_path) if os.path.isdir(tomo_path): sim_dir", "npy_file = tomo_path else: raise ValueError(\"simulation must be a directory or an .npy", "dense diffraction tomography\" Bioinformatics 2015 \"\"\" grad = np.gradient(ri.real-ref) result = 0 for", "and described in Müller et. al, \"ODTbrain: a Python library for full-view, dense", "x.reshape(1, -1, 1) z = x.reshape(1, -1, 1) zero = ((x-c)**2 + (y-c)**2", "`tomo_path` is not an .npy file. 
Returns ------- rms, tv: floats root-mean-square and", "return ss, tv def cutout(a): \"\"\"Cut out circle/sphere from 2D/3D square/cubic array\"\"\" x", "must have dimension 2 or 3!\") a *= zero #tool.arr2im(a, scale=True).save(\"test.png\") return a", "def metric_tv(ri, ref): \"\"\"Total variation metric (normalized) This metric was used and described", "c = a.shape[0] / 2 if len(a.shape) == 2: x = x.reshape(-1, 1)", "else: # Recompute everything ri = ex_bpg.backpropagate_fdtd_data(sim_dir, approximation=approx, autofocus=autofocus) # reference riref =", "str Simulation directory or .npy file of a reconstructed simulation approx: str Approximation", "line in lines: line = line.strip() if line.startswith(\"TV_\"+approx): try: tv = float(line.split()[1]) except:", "exists and read parameters if os.path.exists(metr_file): with open(metr_file, \"r\") as fd: lines =", "-1) zero = ((x-c)**2 + (y-c)**2) < c**2 elif len(a.shape) == 3: x", "3: x = x.reshape(-1, 1, 1) y = x.reshape(1, -1, 1) z =", "zero = ((x-c)**2 + (y-c)**2) < c**2 elif len(a.shape) == 3: x =", "= False elif tomo_path.endswith(\".npy\"): res_dir = os.path.dirname(os.path.abspath(tomo_path)) sim_dir = res_dir[:-8] msg = \"Simulation", "os.path.isdir(tomo_path): sim_dir = os.path.abspath(tomo_path) res_dir = os.path.abspath(tomo_path)+\"_results\" common.mkdir_p(res_dir) metr_file = os.path.join(res_dir, \"metrices.txt\") npy_file", "tv def cutout(a): \"\"\"Cut out circle/sphere from 2D/3D square/cubic array\"\"\" x = np.arange(a.shape[0])", "np.gradient(ri.real-ref) result = 0 for g in grad: result += np.sum(cutout(np.abs(g))) tv =", "ss is None: if npy_file: ri = np.load(npy_file) assert autofocus == False, \"`autofocus`", "\"`autofocus` has no effect for .npy files!\" else: # Recompute everything ri =", "elif line.startswith(\"SS_\"+approx): try: ss = float(line.split()[1]) except: pass if tv is None or", "float(line.split()[1]) except: pass if tv is None or ss is None: if npy_file:", "tv = None ss = None # Check if the results_file exists and", "ri-riref\\n\" lines += \"TV_{} {:.15e}\\n\".format(approx, tv) lines += \"SS_{} {:.15e}\\n\".format(approx, ss) resfdata.writelines(lines) return", "((x-c)**2 + (y-c)**2 + (z-c)**2) < c**2 else: raise ValueError(\"Cutout array must have", "Save result in resf files with open(metr_file, \"a\") as resfdata: lines = \"#", "Check if the results_file exists and read parameters if os.path.exists(metr_file): with open(metr_file, \"r\")", "assert approx in [\"radon\", \"born\", \"rytov\"] tomo_path = os.path.abspath(tomo_path) if os.path.isdir(tomo_path): sim_dir =", "with '_results' appended!\" assert os.path.exists(sim_dir), msg metr_file = tomo_path[:-4]+\"_metrices.txt\" npy_file = tomo_path else:", "directory or .npy file of a reconstructed simulation approx: str Approximation to use,", "\"\"\"Tools used by the examples \"\"\" import numpy as np import os import", "== 3: x = x.reshape(-1, 1, 1) y = x.reshape(1, -1, 1) z", "tv) lines += \"SS_{} {:.15e}\\n\".format(approx, ss) resfdata.writelines(lines) return ss, tv def cutout(a): \"\"\"Cut", "open(metr_file, \"a\") as resfdata: lines = \"# metrices of ri-riref\\n\" lines += \"TV_{}", "if the results_file exists and read parameters if os.path.exists(metr_file): with open(metr_file, \"r\") as", "else: raise ValueError(\"Cutout array must have dimension 2 or 3!\") a *= zero", "x.reshape(-1, 1, 1) y = x.reshape(1, -1, 1) z = x.reshape(1, -1, 1)", "in a \" +\\ \"folder named after the simulation with '_results' appended!\" assert", "`True`, 
perform autofocusing. If `False` uses the exact focusing (the center of rotation", "= os.path.abspath(tomo_path) res_dir = os.path.abspath(tomo_path)+\"_results\" common.mkdir_p(res_dir) metr_file = os.path.join(res_dir, \"metrices.txt\") npy_file = False", "ss = float(line.split()[1]) except: pass if tv is None or ss is None:", "y = x.reshape(1, -1, 1) z = x.reshape(1, -1, 1) zero = ((x-c)**2", "1) y = x.reshape(1, -1) zero = ((x-c)**2 + (y-c)**2) < c**2 elif", "x = x.reshape(-1, 1) y = x.reshape(1, -1) zero = ((x-c)**2 + (y-c)**2)", "approx, autofocus=False): \"\"\"Compute RMS and TV metrices for a MEEP-simulated ODT reconstruction Parameters", "\"rytov\"] autofocus: bool If `True`, perform autofocusing. If `False` uses the exact focusing", "= x.reshape(1, -1) zero = ((x-c)**2 + (y-c)**2) < c**2 elif len(a.shape) ==", "with open(metr_file, \"a\") as resfdata: lines = \"# metrices of ri-riref\\n\" lines +=", "= np.sum(cutout(ri.real-ref.real)**2) norm = np.sum(cutout(ref.real-1)**2) return np.sqrt(rms/norm) def metric_tv(ri, ref): \"\"\"Total variation metric", "perform autofocusing. If `False` uses the exact focusing (the center of rotation in", "= tomo_path else: raise ValueError(\"simulation must be a directory or an .npy file!\")", "file!\") tv = None ss = None # Check if the results_file exists", "autofocus: bool If `True`, perform autofocusing. If `False` uses the exact focusing (the", "os.path.join(res_dir, \"metrices.txt\") npy_file = False elif tomo_path.endswith(\".npy\"): res_dir = os.path.dirname(os.path.abspath(tomo_path)) sim_dir = res_dir[:-8]", "metrices of ri-riref\\n\" lines += \"TV_{} {:.15e}\\n\".format(approx, tv) lines += \"SS_{} {:.15e}\\n\".format(approx, ss)", "if len(a.shape) == 2: x = x.reshape(-1, 1) y = x.reshape(1, -1) zero", "np.arange(a.shape[0]) c = a.shape[0] / 2 if len(a.shape) == 2: x = x.reshape(-1,", "variation metric (normalized) This metric was used and described in Müller et. al,", "square/cubic array\"\"\" x = np.arange(a.shape[0]) c = a.shape[0] / 2 if len(a.shape) ==", "np.sum(cutout(ref.real-1)**2) return np.sqrt(rms/norm) def metric_tv(ri, ref): \"\"\"Total variation metric (normalized) This metric was", "raise ValueError(\"Cutout array must have dimension 2 or 3!\") a *= zero #tool.arr2im(a,", "\"ODTbrain: a Python library for full-view, dense diffraction tomography\" Bioinformatics 2015 \"\"\" rms", "res_dir = os.path.dirname(os.path.abspath(tomo_path)) sim_dir = res_dir[:-8] msg = \"Simulation directory not found! The", "1, 1) y = x.reshape(1, -1, 1) z = x.reshape(1, -1, 1) zero", "resfdata.writelines(lines) return ss, tv def cutout(a): \"\"\"Cut out circle/sphere from 2D/3D square/cubic array\"\"\"", "root-mean-square and total variation errors Notes ----- A second call with the same", "sim_dir = res_dir[:-8] msg = \"Simulation directory not found! 
The .npy file should", "pass if tv is None or ss is None: if npy_file: ri =", "= metric_tv(ri, riref) # Save result in resf files with open(metr_file, \"a\") as", "-------- metric_rms, metric_tv: The used metrics \"\"\" assert approx in [\"radon\", \"born\", \"rytov\"]", "2 if len(a.shape) == 2: x = x.reshape(-1, 1) y = x.reshape(1, -1)", "ex_bpg.backpropagate_fdtd_data(sim_dir, approximation=approx, autofocus=autofocus) # reference riref = extract.get_tomo_ri_structure(sim_dir) ss = metric_rms(ri, riref) tv", "metric_rms, metric_tv: The used metrics \"\"\" assert approx in [\"radon\", \"born\", \"rytov\"] tomo_path", "\"TV_{} {:.15e}\\n\".format(approx, tv) lines += \"SS_{} {:.15e}\\n\".format(approx, ss) resfdata.writelines(lines) return ss, tv def", "The used metrics \"\"\" assert approx in [\"radon\", \"born\", \"rytov\"] tomo_path = os.path.abspath(tomo_path)", "a MEEP-simulated ODT reconstruction Parameters ---------- tomo_path: str Simulation directory or .npy file", "= os.path.abspath(tomo_path)+\"_results\" common.mkdir_p(res_dir) metr_file = os.path.join(res_dir, \"metrices.txt\") npy_file = False elif tomo_path.endswith(\".npy\"): res_dir", "pass elif line.startswith(\"SS_\"+approx): try: ss = float(line.split()[1]) except: pass if tv is None", "if npy_file: ri = np.load(npy_file) assert autofocus == False, \"`autofocus` has no effect", "----- A second call with the same arguments will be fast, because the", "compute_metrices(tomo_path, approx, autofocus=False): \"\"\"Compute RMS and TV metrices for a MEEP-simulated ODT reconstruction", "np.sqrt(rms/norm) def metric_tv(ri, ref): \"\"\"Total variation metric (normalized) This metric was used and", "grad: result += np.sum(cutout(np.abs(g))) tv = result / len(grad) norm = np.sum(cutout(ref.real-1)**2) return", "try: ss = float(line.split()[1]) except: pass if tv is None or ss is", "2D/3D square/cubic array\"\"\" x = np.arange(a.shape[0]) c = a.shape[0] / 2 if len(a.shape)", "line = line.strip() if line.startswith(\"TV_\"+approx): try: tv = float(line.split()[1]) except: pass elif line.startswith(\"SS_\"+approx):", "sense if `tomo_path` is not an .npy file. Returns ------- rms, tv: floats", "msg = \"Simulation directory not found! The .npy file should be in a", ".npy file!\") tv = None ss = None # Check if the results_file", "of ri-riref\\n\" lines += \"TV_{} {:.15e}\\n\".format(approx, tv) lines += \"SS_{} {:.15e}\\n\".format(approx, ss) resfdata.writelines(lines)", "autofocus=autofocus) # reference riref = extract.get_tomo_ri_structure(sim_dir) ss = metric_rms(ri, riref) tv = metric_tv(ri,", "or 3!\") a *= zero #tool.arr2im(a, scale=True).save(\"test.png\") return a def metric_rms(ri, ref): \"\"\"Root", "\"\"\"Root mean square metric (normalized) This metric was used and described in Müller", "a *= zero #tool.arr2im(a, scale=True).save(\"test.png\") return a def metric_rms(ri, ref): \"\"\"Root mean square", "\"\"\" grad = np.gradient(ri.real-ref) result = 0 for g in grad: result +=", "= None ss = None # Check if the results_file exists and read", "common import ex_bpg def compute_metrices(tomo_path, approx, autofocus=False): \"\"\"Compute RMS and TV metrices for", "\"metrices.txt\") npy_file = False elif tomo_path.endswith(\".npy\"): res_dir = os.path.dirname(os.path.abspath(tomo_path)) sim_dir = res_dir[:-8] msg", "or .npy file of a reconstructed simulation approx: str Approximation to use, one", "\"# metrices of ri-riref\\n\" lines += \"TV_{} {:.15e}\\n\".format(approx, tv) lines += \"SS_{} {:.15e}\\n\".format(approx,", "found! 
The .npy file should be in a \" +\\ \"folder named after", "If `False` uses the exact focusing (the center of rotation in the simulation).", "mean square metric (normalized) This metric was used and described in Müller et.", "len(a.shape) == 3: x = x.reshape(-1, 1, 1) y = x.reshape(1, -1, 1)", "See Also -------- metric_rms, metric_tv: The used metrics \"\"\" assert approx in [\"radon\",", "zero #tool.arr2im(a, scale=True).save(\"test.png\") return a def metric_rms(ri, ref): \"\"\"Root mean square metric (normalized)", "should be in a \" +\\ \"folder named after the simulation with '_results'", "directory or an .npy file!\") tv = None ss = None # Check", "\"folder named after the simulation with '_results' appended!\" assert os.path.exists(sim_dir), msg metr_file =", "metr_file = os.path.join(res_dir, \"metrices.txt\") npy_file = False elif tomo_path.endswith(\".npy\"): res_dir = os.path.dirname(os.path.abspath(tomo_path)) sim_dir", "float(line.split()[1]) except: pass elif line.startswith(\"SS_\"+approx): try: ss = float(line.split()[1]) except: pass if tv", "for line in lines: line = line.strip() if line.startswith(\"TV_\"+approx): try: tv = float(line.split()[1])", "\"born\", \"rytov\"] autofocus: bool If `True`, perform autofocusing. If `False` uses the exact", "assert os.path.exists(sim_dir), msg metr_file = tomo_path[:-4]+\"_metrices.txt\" npy_file = tomo_path else: raise ValueError(\"simulation must", "for .npy files!\" else: # Recompute everything ri = ex_bpg.backpropagate_fdtd_data(sim_dir, approximation=approx, autofocus=autofocus) #", "def cutout(a): \"\"\"Cut out circle/sphere from 2D/3D square/cubic array\"\"\" x = np.arange(a.shape[0]) c", "None or ss is None: if npy_file: ri = np.load(npy_file) assert autofocus ==", "elif tomo_path.endswith(\".npy\"): res_dir = os.path.dirname(os.path.abspath(tomo_path)) sim_dir = res_dir[:-8] msg = \"Simulation directory not", "square metric (normalized) This metric was used and described in Müller et. al,", "RMS and TV metrices for a MEEP-simulated ODT reconstruction Parameters ---------- tomo_path: str", "+ (y-c)**2 + (z-c)**2) < c**2 else: raise ValueError(\"Cutout array must have dimension", "of a reconstructed simulation approx: str Approximation to use, one of [\"radon\", \"born\",", "ri = np.load(npy_file) assert autofocus == False, \"`autofocus` has no effect for .npy", "a Python library for full-view, dense diffraction tomography\" Bioinformatics 2015 \"\"\" rms =", "autofocus == False, \"`autofocus` has no effect for .npy files!\" else: # Recompute", "saved on disk. See Also -------- metric_rms, metric_tv: The used metrics \"\"\" assert", "an .npy file. Returns ------- rms, tv: floats root-mean-square and total variation errors", "only makes sense if `tomo_path` is not an .npy file. Returns ------- rms,", "riref) tv = metric_tv(ri, riref) # Save result in resf files with open(metr_file,", "= tomo_path[:-4]+\"_metrices.txt\" npy_file = tomo_path else: raise ValueError(\"simulation must be a directory or", "TV metrices for a MEEP-simulated ODT reconstruction Parameters ---------- tomo_path: str Simulation directory", "disk. 
See Also -------- metric_rms, metric_tv: The used metrics \"\"\" assert approx in", "c**2 else: raise ValueError(\"Cutout array must have dimension 2 or 3!\") a *=", "line.startswith(\"TV_\"+approx): try: tv = float(line.split()[1]) except: pass elif line.startswith(\"SS_\"+approx): try: ss = float(line.split()[1])", "metric_tv(ri, riref) # Save result in resf files with open(metr_file, \"a\") as resfdata:", "Python library for full-view, dense diffraction tomography\" Bioinformatics 2015 \"\"\" rms = np.sum(cutout(ri.real-ref.real)**2)", "from 2D/3D square/cubic array\"\"\" x = np.arange(a.shape[0]) c = a.shape[0] / 2 if", "\"\"\" import numpy as np import os import sys sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))+\"/../meep_tomo\") from meep_tomo", "MEEP-simulated ODT reconstruction Parameters ---------- tomo_path: str Simulation directory or .npy file of", "y = x.reshape(1, -1) zero = ((x-c)**2 + (y-c)**2) < c**2 elif len(a.shape)", "ss = metric_rms(ri, riref) tv = metric_tv(ri, riref) # Save result in resf", "makes sense if `tomo_path` is not an .npy file. Returns ------- rms, tv:", "\"a\") as resfdata: lines = \"# metrices of ri-riref\\n\" lines += \"TV_{} {:.15e}\\n\".format(approx,", "et. al, \"ODTbrain: a Python library for full-view, dense diffraction tomography\" Bioinformatics 2015", "assert autofocus == False, \"`autofocus` has no effect for .npy files!\" else: #", "Recompute everything ri = ex_bpg.backpropagate_fdtd_data(sim_dir, approximation=approx, autofocus=autofocus) # reference riref = extract.get_tomo_ri_structure(sim_dir) ss", "+ (z-c)**2) < c**2 else: raise ValueError(\"Cutout array must have dimension 2 or", "metric (normalized) This metric was used and described in Müller et. al, \"ODTbrain:", "try: tv = float(line.split()[1]) except: pass elif line.startswith(\"SS_\"+approx): try: ss = float(line.split()[1]) except:", "one of [\"radon\", \"born\", \"rytov\"] autofocus: bool If `True`, perform autofocusing. If `False`", "*= zero #tool.arr2im(a, scale=True).save(\"test.png\") return a def metric_rms(ri, ref): \"\"\"Root mean square metric", "named after the simulation with '_results' appended!\" assert os.path.exists(sim_dir), msg metr_file = tomo_path[:-4]+\"_metrices.txt\"", "the simulation with '_results' appended!\" assert os.path.exists(sim_dir), msg metr_file = tomo_path[:-4]+\"_metrices.txt\" npy_file =", "after the simulation with '_results' appended!\" assert os.path.exists(sim_dir), msg metr_file = tomo_path[:-4]+\"_metrices.txt\" npy_file", "Notes ----- A second call with the same arguments will be fast, because", "as resfdata: lines = \"# metrices of ri-riref\\n\" lines += \"TV_{} {:.15e}\\n\".format(approx, tv)", "simulation approx: str Approximation to use, one of [\"radon\", \"born\", \"rytov\"] autofocus: bool", "in Müller et. 
al, \"ODTbrain: a Python library for full-view, dense diffraction tomography\"", "lines += \"TV_{} {:.15e}\\n\".format(approx, tv) lines += \"SS_{} {:.15e}\\n\".format(approx, ss) resfdata.writelines(lines) return ss,", "os.path.abspath(tomo_path) res_dir = os.path.abspath(tomo_path)+\"_results\" common.mkdir_p(res_dir) metr_file = os.path.join(res_dir, \"metrices.txt\") npy_file = False elif", "== False, \"`autofocus` has no effect for .npy files!\" else: # Recompute everything", "is None: if npy_file: ri = np.load(npy_file) assert autofocus == False, \"`autofocus` has", "or an .npy file!\") tv = None ss = None # Check if", "0 for g in grad: result += np.sum(cutout(np.abs(g))) tv = result / len(grad)", "by the examples \"\"\" import numpy as np import os import sys sys.path.insert(0,", "= ((x-c)**2 + (y-c)**2 + (z-c)**2) < c**2 else: raise ValueError(\"Cutout array must", "exact focusing (the center of rotation in the simulation). This only makes sense", "np.load(npy_file) assert autofocus == False, \"`autofocus` has no effect for .npy files!\" else:", "< c**2 elif len(a.shape) == 3: x = x.reshape(-1, 1, 1) y =", "import extract, common import ex_bpg def compute_metrices(tomo_path, approx, autofocus=False): \"\"\"Compute RMS and TV", "= x.reshape(-1, 1) y = x.reshape(1, -1) zero = ((x-c)**2 + (y-c)**2) <", "in the simulation). This only makes sense if `tomo_path` is not an .npy", "#tool.arr2im(a, scale=True).save(\"test.png\") return a def metric_rms(ri, ref): \"\"\"Root mean square metric (normalized) This", "simulation). This only makes sense if `tomo_path` is not an .npy file. Returns", "files!\" else: # Recompute everything ri = ex_bpg.backpropagate_fdtd_data(sim_dir, approximation=approx, autofocus=autofocus) # reference riref", "circle/sphere from 2D/3D square/cubic array\"\"\" x = np.arange(a.shape[0]) c = a.shape[0] / 2", "x = np.arange(a.shape[0]) c = a.shape[0] / 2 if len(a.shape) == 2: x", "riref) # Save result in resf files with open(metr_file, \"a\") as resfdata: lines", "array\"\"\" x = np.arange(a.shape[0]) c = a.shape[0] / 2 if len(a.shape) == 2:", "= \"# metrices of ri-riref\\n\" lines += \"TV_{} {:.15e}\\n\".format(approx, tv) lines += \"SS_{}", "metric_tv: The used metrics \"\"\" assert approx in [\"radon\", \"born\", \"rytov\"] tomo_path =", "line.startswith(\"SS_\"+approx): try: ss = float(line.split()[1]) except: pass if tv is None or ss", "ex_bpg def compute_metrices(tomo_path, approx, autofocus=False): \"\"\"Compute RMS and TV metrices for a MEEP-simulated", "res_dir = os.path.abspath(tomo_path)+\"_results\" common.mkdir_p(res_dir) metr_file = os.path.join(res_dir, \"metrices.txt\") npy_file = False elif tomo_path.endswith(\".npy\"):", "\"SS_{} {:.15e}\\n\".format(approx, ss) resfdata.writelines(lines) return ss, tv def cutout(a): \"\"\"Cut out circle/sphere from", "floats root-mean-square and total variation errors Notes ----- A second call with the", "a.shape[0] / 2 if len(a.shape) == 2: x = x.reshape(-1, 1) y =", "same arguments will be fast, because the result is saved on disk. See", "because the result is saved on disk. See Also -------- metric_rms, metric_tv: The", "used by the examples \"\"\" import numpy as np import os import sys", "result is saved on disk. 
See Also -------- metric_rms, metric_tv: The used metrics", "array must have dimension 2 or 3!\") a *= zero #tool.arr2im(a, scale=True).save(\"test.png\") return", ".npy file of a reconstructed simulation approx: str Approximation to use, one of", "os.path.dirname(os.path.abspath(__file__))+\"/../meep_tomo\") from meep_tomo import extract, common import ex_bpg def compute_metrices(tomo_path, approx, autofocus=False): \"\"\"Compute", "\"\"\" rms = np.sum(cutout(ri.real-ref.real)**2) norm = np.sum(cutout(ref.real-1)**2) return np.sqrt(rms/norm) def metric_tv(ri, ref): \"\"\"Total", "(z-c)**2) < c**2 else: raise ValueError(\"Cutout array must have dimension 2 or 3!\")", "ValueError(\"simulation must be a directory or an .npy file!\") tv = None ss", "lines = \"# metrices of ri-riref\\n\" lines += \"TV_{} {:.15e}\\n\".format(approx, tv) lines +=", "# Check if the results_file exists and read parameters if os.path.exists(metr_file): with open(metr_file,", "\"\"\"Compute RMS and TV metrices for a MEEP-simulated ODT reconstruction Parameters ---------- tomo_path:", "metrices for a MEEP-simulated ODT reconstruction Parameters ---------- tomo_path: str Simulation directory or", "tv = float(line.split()[1]) except: pass elif line.startswith(\"SS_\"+approx): try: ss = float(line.split()[1]) except: pass", "tomography\" Bioinformatics 2015 \"\"\" rms = np.sum(cutout(ri.real-ref.real)**2) norm = np.sum(cutout(ref.real-1)**2) return np.sqrt(rms/norm) def", "fast, because the result is saved on disk. See Also -------- metric_rms, metric_tv:", "= os.path.join(res_dir, \"metrices.txt\") npy_file = False elif tomo_path.endswith(\".npy\"): res_dir = os.path.dirname(os.path.abspath(tomo_path)) sim_dir =", "approx in [\"radon\", \"born\", \"rytov\"] tomo_path = os.path.abspath(tomo_path) if os.path.isdir(tomo_path): sim_dir = os.path.abspath(tomo_path)", "`False` uses the exact focusing (the center of rotation in the simulation). This", "raise ValueError(\"simulation must be a directory or an .npy file!\") tv = None", "\"ODTbrain: a Python library for full-view, dense diffraction tomography\" Bioinformatics 2015 \"\"\" grad", "if line.startswith(\"TV_\"+approx): try: tv = float(line.split()[1]) except: pass elif line.startswith(\"SS_\"+approx): try: ss =", "for full-view, dense diffraction tomography\" Bioinformatics 2015 \"\"\" rms = np.sum(cutout(ri.real-ref.real)**2) norm =", "Simulation directory or .npy file of a reconstructed simulation approx: str Approximation to", "((x-c)**2 + (y-c)**2) < c**2 elif len(a.shape) == 3: x = x.reshape(-1, 1,", "return np.sqrt(rms/norm) def metric_tv(ri, ref): \"\"\"Total variation metric (normalized) This metric was used", "if os.path.exists(metr_file): with open(metr_file, \"r\") as fd: lines = fd.readlines() for line in", "1) zero = ((x-c)**2 + (y-c)**2 + (z-c)**2) < c**2 else: raise ValueError(\"Cutout", "not an .npy file. Returns ------- rms, tv: floats root-mean-square and total variation", "except: pass if tv is None or ss is None: if npy_file: ri", "the simulation). 
This only makes sense if `tomo_path` is not an .npy file.", "# Save result in resf files with open(metr_file, \"a\") as resfdata: lines =", "resf files with open(metr_file, \"a\") as resfdata: lines = \"# metrices of ri-riref\\n\"", "used metrics \"\"\" assert approx in [\"radon\", \"born\", \"rytov\"] tomo_path = os.path.abspath(tomo_path) if", "ref): \"\"\"Total variation metric (normalized) This metric was used and described in Müller", "3!\") a *= zero #tool.arr2im(a, scale=True).save(\"test.png\") return a def metric_rms(ri, ref): \"\"\"Root mean", "import sys sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))+\"/../meep_tomo\") from meep_tomo import extract, common import ex_bpg def compute_metrices(tomo_path,", "no effect for .npy files!\" else: # Recompute everything ri = ex_bpg.backpropagate_fdtd_data(sim_dir, approximation=approx,", "import ex_bpg def compute_metrices(tomo_path, approx, autofocus=False): \"\"\"Compute RMS and TV metrices for a", "/ 2 if len(a.shape) == 2: x = x.reshape(-1, 1) y = x.reshape(1,", "= ex_bpg.backpropagate_fdtd_data(sim_dir, approximation=approx, autofocus=autofocus) # reference riref = extract.get_tomo_ri_structure(sim_dir) ss = metric_rms(ri, riref)", "extract.get_tomo_ri_structure(sim_dir) ss = metric_rms(ri, riref) tv = metric_tv(ri, riref) # Save result in", "autofocus=False): \"\"\"Compute RMS and TV metrices for a MEEP-simulated ODT reconstruction Parameters ----------", "This only makes sense if `tomo_path` is not an .npy file. Returns -------", "os import sys sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))+\"/../meep_tomo\") from meep_tomo import extract, common import ex_bpg def", "bool If `True`, perform autofocusing. If `False` uses the exact focusing (the center", "variation errors Notes ----- A second call with the same arguments will be", "= float(line.split()[1]) except: pass if tv is None or ss is None: if", "{:.15e}\\n\".format(approx, ss) resfdata.writelines(lines) return ss, tv def cutout(a): \"\"\"Cut out circle/sphere from 2D/3D", "library for full-view, dense diffraction tomography\" Bioinformatics 2015 \"\"\" rms = np.sum(cutout(ri.real-ref.real)**2) norm", "full-view, dense diffraction tomography\" Bioinformatics 2015 \"\"\" grad = np.gradient(ri.real-ref) result = 0", "and TV metrices for a MEEP-simulated ODT reconstruction Parameters ---------- tomo_path: str Simulation", "examples \"\"\" import numpy as np import os import sys sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))+\"/../meep_tomo\") from", "diffraction tomography\" Bioinformatics 2015 \"\"\" grad = np.gradient(ri.real-ref) result = 0 for g", "False, \"`autofocus` has no effect for .npy files!\" else: # Recompute everything ri", "the results_file exists and read parameters if os.path.exists(metr_file): with open(metr_file, \"r\") as fd:", "fd.readlines() for line in lines: line = line.strip() if line.startswith(\"TV_\"+approx): try: tv =", "numpy as np import os import sys sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))+\"/../meep_tomo\") from meep_tomo import extract,", "a reconstructed simulation approx: str Approximation to use, one of [\"radon\", \"born\", \"rytov\"]", "the exact focusing (the center of rotation in the simulation). This only makes", "def compute_metrices(tomo_path, approx, autofocus=False): \"\"\"Compute RMS and TV metrices for a MEEP-simulated ODT", "(normalized) This metric was used and described in Müller et. 
al, \"ODTbrain: a", "lines += \"SS_{} {:.15e}\\n\".format(approx, ss) resfdata.writelines(lines) return ss, tv def cutout(a): \"\"\"Cut out", "= ((x-c)**2 + (y-c)**2) < c**2 elif len(a.shape) == 3: x = x.reshape(-1,", "with open(metr_file, \"r\") as fd: lines = fd.readlines() for line in lines: line", "Approximation to use, one of [\"radon\", \"born\", \"rytov\"] autofocus: bool If `True`, perform", "+= \"SS_{} {:.15e}\\n\".format(approx, ss) resfdata.writelines(lines) return ss, tv def cutout(a): \"\"\"Cut out circle/sphere", "ref): \"\"\"Root mean square metric (normalized) This metric was used and described in", "tomography\" Bioinformatics 2015 \"\"\" grad = np.gradient(ri.real-ref) result = 0 for g in", "metr_file = tomo_path[:-4]+\"_metrices.txt\" npy_file = tomo_path else: raise ValueError(\"simulation must be a directory", "None: if npy_file: ri = np.load(npy_file) assert autofocus == False, \"`autofocus` has no", "str Approximation to use, one of [\"radon\", \"born\", \"rytov\"] autofocus: bool If `True`,", "-1, 1) z = x.reshape(1, -1, 1) zero = ((x-c)**2 + (y-c)**2 +", "= np.load(npy_file) assert autofocus == False, \"`autofocus` has no effect for .npy files!\"", "out circle/sphere from 2D/3D square/cubic array\"\"\" x = np.arange(a.shape[0]) c = a.shape[0] /", "metric_tv(ri, ref): \"\"\"Total variation metric (normalized) This metric was used and described in" ]
[ "to access the Androzoo Dataset') androzoo.get(apks) def main(): script = argparse.ArgumentParser(description=\"You have to", "download as input\") script.add_argument('-i', '--input', required=True) args = script.parse_args() Download(args.input) print(\"apks downloaded ...\")", "csv def Download(inputfile): # The list of hashs can be cerated using the", "open(inputfile) as csvfile: readCSV = csv.reader(csvfile, delimiter=',') apks = [] for row in", "apks.append(apk) print(apks) print(\"Hash files list uploaded ...\") androzoo = pyandrozoo.pyAndroZoo('<IP to access the", "to provide the CSV file with the list of hash to download as", "pyandrozoo.pyAndroZoo('<IP to access the Androzoo Dataset') androzoo.get(apks) def main(): script = argparse.ArgumentParser(description=\"You have", "have to provide the CSV file with the list of hash to download", "readCSV = csv.reader(csvfile, delimiter=',') apks = [] for row in readCSV: apk =", "the CSV file with the list of hash to download as input\") script.add_argument('-i',", "input\") script.add_argument('-i', '--input', required=True) args = script.parse_args() Download(args.input) print(\"apks downloaded ...\") print(\"Done\") if", "import csv def Download(inputfile): # The list of hashs can be cerated using", "script with open(inputfile) as csvfile: readCSV = csv.reader(csvfile, delimiter=',') apks = [] for", "provide the CSV file with the list of hash to download as input\")", "as input\") script.add_argument('-i', '--input', required=True) args = script.parse_args() Download(args.input) print(\"apks downloaded ...\") print(\"Done\")", "script.add_argument('-i', '--input', required=True) args = script.parse_args() Download(args.input) print(\"apks downloaded ...\") print(\"Done\") if __name__", "Androzoo Dataset') androzoo.get(apks) def main(): script = argparse.ArgumentParser(description=\"You have to provide the CSV", "uploaded ...\") androzoo = pyandrozoo.pyAndroZoo('<IP to access the Androzoo Dataset') androzoo.get(apks) def main():", "list of hash to download as input\") script.add_argument('-i', '--input', required=True) args = script.parse_args()", "can be cerated using the VT_Labeling.py script with open(inputfile) as csvfile: readCSV =", "with the list of hash to download as input\") script.add_argument('-i', '--input', required=True) args", "access the Androzoo Dataset') androzoo.get(apks) def main(): script = argparse.ArgumentParser(description=\"You have to provide", "of hash to download as input\") script.add_argument('-i', '--input', required=True) args = script.parse_args() Download(args.input)", "...\") androzoo = pyandrozoo.pyAndroZoo('<IP to access the Androzoo Dataset') androzoo.get(apks) def main(): script", "= argparse.ArgumentParser(description=\"You have to provide the CSV file with the list of hash", "argparse.ArgumentParser(description=\"You have to provide the CSV file with the list of hash to", "androzoo = pyandrozoo.pyAndroZoo('<IP to access the Androzoo Dataset') androzoo.get(apks) def main(): script =", "def main(): script = argparse.ArgumentParser(description=\"You have to provide the CSV file with the", "for row in readCSV: apk = row[0] apks.append(apk) print(apks) print(\"Hash files list uploaded", "The list of hashs can be cerated using the VT_Labeling.py script with open(inputfile)", "as csvfile: readCSV = csv.reader(csvfile, delimiter=',') apks = [] for row in readCSV:", "be cerated using the VT_Labeling.py script with open(inputfile) as csvfile: readCSV = csv.reader(csvfile,", "pyandrozoo import csv def 
Download(inputfile): # The list of hashs can be cerated", "to download as input\") script.add_argument('-i', '--input', required=True) args = script.parse_args() Download(args.input) print(\"apks downloaded", "args = script.parse_args() Download(args.input) print(\"apks downloaded ...\") print(\"Done\") if __name__ == '__main__': main()", "row[0] apks.append(apk) print(apks) print(\"Hash files list uploaded ...\") androzoo = pyandrozoo.pyAndroZoo('<IP to access", "the list of hash to download as input\") script.add_argument('-i', '--input', required=True) args =", "with open(inputfile) as csvfile: readCSV = csv.reader(csvfile, delimiter=',') apks = [] for row", "VT_Labeling.py script with open(inputfile) as csvfile: readCSV = csv.reader(csvfile, delimiter=',') apks = []", "CSV file with the list of hash to download as input\") script.add_argument('-i', '--input',", "using the VT_Labeling.py script with open(inputfile) as csvfile: readCSV = csv.reader(csvfile, delimiter=',') apks", "hashs can be cerated using the VT_Labeling.py script with open(inputfile) as csvfile: readCSV", "Dataset') androzoo.get(apks) def main(): script = argparse.ArgumentParser(description=\"You have to provide the CSV file", "Download(inputfile): # The list of hashs can be cerated using the VT_Labeling.py script", "list uploaded ...\") androzoo = pyandrozoo.pyAndroZoo('<IP to access the Androzoo Dataset') androzoo.get(apks) def", "delimiter=',') apks = [] for row in readCSV: apk = row[0] apks.append(apk) print(apks)", "row in readCSV: apk = row[0] apks.append(apk) print(apks) print(\"Hash files list uploaded ...\")", "csv.reader(csvfile, delimiter=',') apks = [] for row in readCSV: apk = row[0] apks.append(apk)", "readCSV: apk = row[0] apks.append(apk) print(apks) print(\"Hash files list uploaded ...\") androzoo =", "= pyandrozoo.pyAndroZoo('<IP to access the Androzoo Dataset') androzoo.get(apks) def main(): script = argparse.ArgumentParser(description=\"You", "files list uploaded ...\") androzoo = pyandrozoo.pyAndroZoo('<IP to access the Androzoo Dataset') androzoo.get(apks)", "of hashs can be cerated using the VT_Labeling.py script with open(inputfile) as csvfile:", "= csv.reader(csvfile, delimiter=',') apks = [] for row in readCSV: apk = row[0]", "'--input', required=True) args = script.parse_args() Download(args.input) print(\"apks downloaded ...\") print(\"Done\") if __name__ ==", "# The list of hashs can be cerated using the VT_Labeling.py script with", "= row[0] apks.append(apk) print(apks) print(\"Hash files list uploaded ...\") androzoo = pyandrozoo.pyAndroZoo('<IP to", "cerated using the VT_Labeling.py script with open(inputfile) as csvfile: readCSV = csv.reader(csvfile, delimiter=',')", "<gh_stars>1-10 import pyandrozoo import csv def Download(inputfile): # The list of hashs can", "script = argparse.ArgumentParser(description=\"You have to provide the CSV file with the list of", "file with the list of hash to download as input\") script.add_argument('-i', '--input', required=True)", "in readCSV: apk = row[0] apks.append(apk) print(apks) print(\"Hash files list uploaded ...\") androzoo", "print(\"Hash files list uploaded ...\") androzoo = pyandrozoo.pyAndroZoo('<IP to access the Androzoo Dataset')", "import pyandrozoo import csv def Download(inputfile): # The list of hashs can be", "apk = row[0] apks.append(apk) print(apks) print(\"Hash files list uploaded ...\") androzoo = pyandrozoo.pyAndroZoo('<IP", "[] for row in readCSV: apk = row[0] apks.append(apk) print(apks) print(\"Hash files list", "csvfile: 
readCSV = csv.reader(csvfile, delimiter=',') apks = [] for row in readCSV: apk", "hash to download as input\") script.add_argument('-i', '--input', required=True) args = script.parse_args() Download(args.input) print(\"apks", "def Download(inputfile): # The list of hashs can be cerated using the VT_Labeling.py", "main(): script = argparse.ArgumentParser(description=\"You have to provide the CSV file with the list", "= [] for row in readCSV: apk = row[0] apks.append(apk) print(apks) print(\"Hash files", "the Androzoo Dataset') androzoo.get(apks) def main(): script = argparse.ArgumentParser(description=\"You have to provide the", "the VT_Labeling.py script with open(inputfile) as csvfile: readCSV = csv.reader(csvfile, delimiter=',') apks =", "apks = [] for row in readCSV: apk = row[0] apks.append(apk) print(apks) print(\"Hash", "androzoo.get(apks) def main(): script = argparse.ArgumentParser(description=\"You have to provide the CSV file with", "list of hashs can be cerated using the VT_Labeling.py script with open(inputfile) as", "print(apks) print(\"Hash files list uploaded ...\") androzoo = pyandrozoo.pyAndroZoo('<IP to access the Androzoo", "required=True) args = script.parse_args() Download(args.input) print(\"apks downloaded ...\") print(\"Done\") if __name__ == '__main__':" ]
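The downloader is meant to be run from the command line (python <script>.py -i <hashes.csv>), but Download can also be called directly. A hedged sketch; the CSV filename below is a placeholder, and it assumes, as the comment in Download notes, that the first column of each row holds an APK hash produced by VT_Labeling.py:

# Hypothetical direct call, bypassing the argparse CLI above.
# "vt_labeled_hashes.csv" is only an illustrative filename.
Download("vt_labeled_hashes.csv")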
[ "{ '$schema': 'http://json-schema.org/schema#', 'type': 'object', 'properties': { 'username': { \"type\": \"string\", 'minLength': 3,", "{ \"type\": \"string\", 'minLength': 3, 'maxLength': 100 }, 'password': { \"type\": \"string\", 'minLength':", "'minLength': 3, 'maxLength': 100 }, 'password': { \"type\": \"string\", 'minLength': 5, 'maxLength': 100", "{ \"type\": \"string\", 'minLength': 5, 'maxLength': 100 } }, \"additionalProperties\": False, 'required': ['username',", "'password': { \"type\": \"string\", 'minLength': 5, 'maxLength': 100 } }, \"additionalProperties\": False, 'required':", "3, 'maxLength': 100 }, 'password': { \"type\": \"string\", 'minLength': 5, 'maxLength': 100 }", "= { '$schema': 'http://json-schema.org/schema#', 'type': 'object', 'properties': { 'username': { \"type\": \"string\", 'minLength':", "REGISTER_SCHEMA = { '$schema': 'http://json-schema.org/schema#', 'type': 'object', 'properties': { 'username': { \"type\": \"string\",", "'properties': { 'username': { \"type\": \"string\", 'minLength': 3, 'maxLength': 100 }, 'password': {", "\"string\", 'minLength': 5, 'maxLength': 100 } }, \"additionalProperties\": False, 'required': ['username', 'password'] }", "'http://json-schema.org/schema#', 'type': 'object', 'properties': { 'username': { \"type\": \"string\", 'minLength': 3, 'maxLength': 100", "\"string\", 'minLength': 3, 'maxLength': 100 }, 'password': { \"type\": \"string\", 'minLength': 5, 'maxLength':", "'$schema': 'http://json-schema.org/schema#', 'type': 'object', 'properties': { 'username': { \"type\": \"string\", 'minLength': 3, 'maxLength':", "\"type\": \"string\", 'minLength': 5, 'maxLength': 100 } }, \"additionalProperties\": False, 'required': ['username', 'password']", "'type': 'object', 'properties': { 'username': { \"type\": \"string\", 'minLength': 3, 'maxLength': 100 },", "'maxLength': 100 }, 'password': { \"type\": \"string\", 'minLength': 5, 'maxLength': 100 } },", "'object', 'properties': { 'username': { \"type\": \"string\", 'minLength': 3, 'maxLength': 100 }, 'password':", "}, 'password': { \"type\": \"string\", 'minLength': 5, 'maxLength': 100 } }, \"additionalProperties\": False,", "'username': { \"type\": \"string\", 'minLength': 3, 'maxLength': 100 }, 'password': { \"type\": \"string\",", "{ 'username': { \"type\": \"string\", 'minLength': 3, 'maxLength': 100 }, 'password': { \"type\":", "100 }, 'password': { \"type\": \"string\", 'minLength': 5, 'maxLength': 100 } }, \"additionalProperties\":", "\"type\": \"string\", 'minLength': 3, 'maxLength': 100 }, 'password': { \"type\": \"string\", 'minLength': 5," ]
[ "'schedule: \"%s\",' % e['horario'] params += 'public_transport: \"%s\",' % e['transporte'] params += 'description:", "= '' params += 'container_type: \"dog_shit_trash\",' params += 'entity_id: %s,' % str(e['codigo']) params", "% t) # marquesinas list_batteries_container = json.load(open('Marquesinas_contenedores_pilas_2017.json')) for i, e in enumerate(list_batteries_container): params", "WHERE t.name in [\"batteries\"] AND c.container_type = \"battery_recycling_point\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\")", "'name: \"bus_stop_%s-%s\",' % (str(i),e['Parada']) params += 'district:\" %s\",' % e['DISTRITO'] params += 'lat:", "gdb graph_driver = GraphDatabase.driver( 'bolt://%s:%s' % ( gdb['host'], gdb['port'] ), auth=basic_auth( gdb['user'], gdb['auth']", "%s\",' % e['DISTRITO'] params += 'lat: %s,' % e['Latitud'] params += 'lon: %s'", "WHERE t.name in [\"furniture\",\"electronics\",\"batteries\"] AND c.container_type = \"clean_point\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\")", "(str(i),e['Parada']) params += 'district:\" %s\",' % e['DISTRITO'] params += 'lat: %s,' % e['Latitud']", "print(e['latitud'].replace(',','.'), e['longitud'].replace(',','.')) params = '' params += 'container_type: \"dog_shit_trash\",' params += 'entity_id: %s,'", "get_distances(self, lat, lon, container_type): query = \"\"\" MATCH p=(n:Containers)<-[r:CAN_BE_DEPLOYED_IN]-(t:TrashType) WHERE n.container_type = \"%s\"", "( gdb['host'], gdb['port'] ), auth=basic_auth( gdb['user'], gdb['auth'] ) ) graph_session = graph_driver.session() class", "c.container_type = \"battery_recycling_point\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") # Link dog shit container", "'entity_id: %s,' % e['Parada'] params += 'name: \"bus_stop_%s-%s\",' % (str(i),e['Parada']) params += 'district:\"", "n.lon as longitude, n.container_type as container_type, collect(t.name) as trash_types, distance( point( {latitude: n.lat,", "for l in lista_puntos: self.load_punto_limpio( **{ \"id_entidad\": l.select_one('atributo[nombre=\"ID-ENTIDAD\"]').text, \"nombre\": l.select_one('atributo[nombre=\"NOMBRE\"]').text, \"horario\": l.select_one('atributo[nombre=\"HORARIO\"]').text, \"transporte\":", "import GRAPH_DATABASE as gdb graph_driver = GraphDatabase.driver( 'bolt://%s:%s' % ( gdb['host'], gdb['port'] ),", "= None puntos_limpios = [] def __init__(self, file_path): self.file = open(file_path).read() def load_punto_limpio(self,", "'' params += 'container_type: \"clean_point\",' params += 'entity_id: %s,' % e['id_entidad'] params +=", "batteries container with their trash type graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name in", "bs4 import BeautifulSoup from config import GRAPH_DATABASE as gdb graph_driver = GraphDatabase.driver( 'bolt://%s:%s'", "l.select_one('atributo[nombre=\"TRANSPORTE\"]').text, \"descripcion\": l.select_one('atributo[nombre=\"DESCRIPCION\"]').text, \"accesibilidad\": l.select_one('atributo[nombre=\"ACCESIBILIDAD\"]').text, \"content-url\": l.select_one('atributo[nombre=\"CONTENT-URL\"]').text, \"localizacion\": \"%s %s %s\" % (", "% str(e['latitud'].replace(',','.')) params += 'lon: %s' % str(e['longitud'].replace(',','.')) try: graph_session.run(base_query % params) except:", "e['transporte'] params += 'description: \"%s\",' % e['descripcion'] params += 'accesibility: %s,' % e['accesibilidad']", "e['localizacion'] params += 'city: \"%s\",' % e['localidad'] params += 'province: \"%s\",' % e['provincia']", "pdb.set_trace() print(\"Saved %d dog shit 
containers\" % i) # Link clean_point related trash", "e['direccion'] params += 'district:\" %s\",' % str(e['distrito']) params += 'lat: %s,' % str(e['latitud'].replace(',','.'))", "{latitude: %s, longitude: %s} ) ) as point_distance ORDER BY point_distance LIMIT 5", "n.lat as latitude, n.lon as longitude, n.container_type as container_type, collect(t.name) as trash_types, distance(", "dog shit containers\" % i) # Link clean_point related trash types graph_session.run(\"\"\" MATCH", "AND c.container_type = \"dog_shit_trash\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") def get_distances(self, lat, lon,", "+= 'address: \"%s\",' % e['direccion'] params += 'district:\" %s\",' % str(e['distrito']) params +=", "% e['localidad'] params += 'province: \"%s\",' % e['provincia'] params += 'postal_code: %s,' %", "= [ 'clean_point', 'dog_shit_trash', 'battery_recycling_point' ] for t in trash_types: query = \"\"\"", "e['distrito'] params += 'lat: %s,' % e['lat'] params += 'lon: %s' % e['lon']", "= \"clean_point\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") # Link batteries container with their", "print(\"Saved %d batteries container\" % i) list_dog_shit_container = json.load(open('Papeleras_con_expendedor_de_bolsas.json')) for i,e in enumerate(list_dog_shit_container):", "for e in entries: params = '' params += 'container_type: \"clean_point\",' params +=", "b_object.find_all('contenido') for l in lista_puntos: self.load_punto_limpio( **{ \"id_entidad\": l.select_one('atributo[nombre=\"ID-ENTIDAD\"]').text, \"nombre\": l.select_one('atributo[nombre=\"NOMBRE\"]').text, \"horario\": l.select_one('atributo[nombre=\"HORARIO\"]').text,", "e['Parada'] params += 'name: \"bus_stop_%s-%s\",' % (str(i),e['Parada']) params += 'district:\" %s\",' % e['DISTRITO']", "l.select_one('atributo[nombre=\"NOMBRE\"]').text, \"horario\": l.select_one('atributo[nombre=\"HORARIO\"]').text, \"transporte\": l.select_one('atributo[nombre=\"TRANSPORTE\"]').text, \"descripcion\": l.select_one('atributo[nombre=\"DESCRIPCION\"]').text, \"accesibilidad\": l.select_one('atributo[nombre=\"ACCESIBILIDAD\"]').text, \"content-url\": l.select_one('atributo[nombre=\"CONTENT-URL\"]').text, \"localizacion\": \"%s", "with their trash type graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name in [\"batteries\"] AND", "%s\",' % e['distrito'] params += 'lat: %s,' % e['lat'] params += 'lon: %s'", "+= 'container_type: \"battery_recycling_point\",' params += 'entity_id: %s,' % e['Parada'] params += 'name: \"bus_stop_%s-%s\",'", "\"lat\": l.select_one('atributo[nombre=\"LATITUD\"]').text, \"lon\": l.select_one('atributo[nombre=\"LONGITUD\"]').text, } ) class GraphModel(object): def __init__(self): print(\"Initialized graph model\")", "MATCH (t:TrashType), (c:Containers) WHERE t.name in [\"dog_shit\"] AND c.container_type = \"dog_shit_trash\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c)", "AND c.container_type = \"clean_point\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") # Link batteries container", "WHERE t.name in [\"dog_shit\"] AND c.container_type = \"dog_shit_trash\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\")", "), auth=basic_auth( gdb['user'], gdb['auth'] ) ) graph_session = graph_driver.session() class PuntosLimpiosExtractor(object): file =", "import json import csv from neo4j.v1 import GraphDatabase from neo4j.v1 import basic_auth from", "in [\"batteries\"] AND c.container_type = \"battery_recycling_point\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") # Link", "in entries: 
params = '' params += 'container_type: \"clean_point\",' params += 'entity_id: %s,'", "def fill_model(self): entries = json.load(open('puntos_limpios.json')) base_query = \"MERGE (p:Containers{%s}) RETURN p;\" # Clean", "%s\",' % str(e['distrito']) params += 'lat: %s,' % str(e['latitud'].replace(',','.')) params += 'lon: %s'", "type graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name in [\"batteries\"] AND c.container_type = \"battery_recycling_point\"", "i) list_dog_shit_container = json.load(open('Papeleras_con_expendedor_de_bolsas.json')) for i,e in enumerate(list_dog_shit_container): print(e['latitud'].replace(',','.'), e['longitud'].replace(',','.')) params = ''", "'address: \"%s\",' % e['localizacion'] params += 'city: \"%s\",' % e['localidad'] params += 'province:", "e['provincia'] params += 'postal_code: %s,' % e['codigo_postal'] params += 'neighborhood: \"%s\",' % e['barrio']", "as gdb graph_driver = GraphDatabase.driver( 'bolt://%s:%s' % ( gdb['host'], gdb['port'] ), auth=basic_auth( gdb['user'],", "graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name in [\"furniture\",\"electronics\",\"batteries\"] AND c.container_type = \"clean_point\" MERGE", "t,c \"\"\") # Link batteries container with their trash type graph_session.run(\"\"\" MATCH (t:TrashType),", "type graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name in [\"dog_shit\"] AND c.container_type = \"dog_shit_trash\"", "container with their trash type graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name in [\"dog_shit\"]", "b_object = BeautifulSoup(self.file, 'html.parser') lista_puntos = b_object.find_all('contenido') for l in lista_puntos: self.load_punto_limpio( **{", "json.load(open('puntos_limpios.json')) base_query = \"MERGE (p:Containers{%s}) RETURN p;\" # Clean points for e in", "read_puntos_limpios_xml(self): b_object = BeautifulSoup(self.file, 'html.parser') lista_puntos = b_object.find_all('contenido') for l in lista_puntos: self.load_punto_limpio(", "= '' params += 'container_type: \"battery_recycling_point\",' params += 'entity_id: %s,' % e['Parada'] params", "trash type graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name in [\"dog_shit\"] AND c.container_type =", "(t:TrashType), (c:Containers) WHERE t.name in [\"dog_shit\"] AND c.container_type = \"dog_shit_trash\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN", "t.name in [\"dog_shit\"] AND c.container_type = \"dog_shit_trash\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") def", "as latitude, n.lon as longitude, n.container_type as container_type, collect(t.name) as trash_types, distance( point(", "longitude: %s} ) ) as point_distance ORDER BY point_distance LIMIT 5 \"\"\" %", "(t:TrashType), (c:Containers) WHERE t.name in [\"batteries\"] AND c.container_type = \"battery_recycling_point\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN", "%s,' % e['lat'] params += 'lon: %s' % e['lon'] graph_session.run(base_query % params) #", "+= 'name: \"bus_stop_%s-%s\",' % (str(i),e['Parada']) params += 'district:\" %s\",' % e['DISTRITO'] params +=", "def get_distances(self, lat, lon, container_type): query = \"\"\" MATCH p=(n:Containers)<-[r:CAN_BE_DEPLOYED_IN]-(t:TrashType) WHERE n.container_type =", "fill_model(self): entries = json.load(open('puntos_limpios.json')) base_query = \"MERGE (p:Containers{%s}) RETURN p;\" # Clean points", "% e['id_entidad'] params += 'name: \"%s\",' % e['nombre'] params += 'schedule: \"%s\",' %", "+= 'district:\" %s\",' % 
str(e['distrito']) params += 'lat: %s,' % str(e['latitud'].replace(',','.')) params +=", "p=(n:Containers)<-[r:CAN_BE_DEPLOYED_IN]-(t:TrashType) WHERE n.container_type = \"%s\" RETURN n.lat as latitude, n.lon as longitude, n.container_type", "params += 'postal_code: %s,' % e['codigo_postal'] params += 'neighborhood: \"%s\",' % e['barrio'] params", "l.select_one('atributo[nombre=\"DISTRITO\"]').text, \"coord_x\": l.select_one('atributo[nombre=\"COORDENADA-X\"]').text, \"coord_y\": l.select_one('atributo[nombre=\"COORDENADA-Y\"]').text, \"lat\": l.select_one('atributo[nombre=\"LATITUD\"]').text, \"lon\": l.select_one('atributo[nombre=\"LONGITUD\"]').text, } ) class GraphModel(object):", ") class GraphModel(object): def __init__(self): print(\"Initialized graph model\") def fill_model(self): entries = json.load(open('puntos_limpios.json'))", "l.select_one('atributo[nombre=\"COORDENADA-X\"]').text, \"coord_y\": l.select_one('atributo[nombre=\"COORDENADA-Y\"]').text, \"lat\": l.select_one('atributo[nombre=\"LATITUD\"]').text, \"lon\": l.select_one('atributo[nombre=\"LONGITUD\"]').text, } ) class GraphModel(object): def __init__(self):", "= \"MERGE (p:Containers{%s}) RETURN p;\" # Clean points for e in entries: params", "point( {latitude: n.lat, longitude:n.lon} ), point( {latitude: %s, longitude: %s} ) ) as", "} ) class GraphModel(object): def __init__(self): print(\"Initialized graph model\") def fill_model(self): entries =", "str(e['codigo']) params += 'name: \"%s-%s %s\",' % (str(i), str(e['codigo']), e['direccion']) params += 'address:", "config import GRAPH_DATABASE as gdb graph_driver = GraphDatabase.driver( 'bolt://%s:%s' % ( gdb['host'], gdb['port']", "WHERE n.container_type = \"%s\" RETURN n.lat as latitude, n.lon as longitude, n.container_type as", "batteries container\" % i) list_dog_shit_container = json.load(open('Papeleras_con_expendedor_de_bolsas.json')) for i,e in enumerate(list_dog_shit_container): print(e['latitud'].replace(',','.'), e['longitud'].replace(',','.'))", "params += 'neighborhood: \"%s\",' % e['barrio'] params += 'district:\" %s\",' % e['distrito'] params", "self.puntos_limpios.append(kwargs) def read_puntos_limpios_xml(self): b_object = BeautifulSoup(self.file, 'html.parser') lista_puntos = b_object.find_all('contenido') for l in", "( l.select_one('atributo[nombre=\"CLASE-VIAL\"]').text, l.select_one('atributo[nombre=\"NOMBRE-VIA\"]').text, l.select_one('atributo[nombre=\"NUM\"]').text ), \"localidad\": l.select_one('atributo[nombre=\"LOCALIDAD\"]').text, \"provincia\": l.select_one('atributo[nombre=\"PROVINCIA\"]').text, \"codigo_postal\": l.select_one('atributo[nombre=\"CODIGO-POSTAL\"]').text, \"barrio\": l.select_one('atributo[nombre=\"BARRIO\"]').text,", "e['DISTRITO'] params += 'lat: %s,' % e['Latitud'] params += 'lon: %s' % e['Longitud']", "%s,' % e['accesibilidad'] params += 'address: \"%s\",' % e['localizacion'] params += 'city: \"%s\",'", "+= 'entity_id: %s,' % e['id_entidad'] params += 'name: \"%s\",' % e['nombre'] params +=", "# Link dog shit container with their trash type graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers)", "model\") def fill_model(self): entries = json.load(open('puntos_limpios.json')) base_query = \"MERGE (p:Containers{%s}) RETURN p;\" #", "(p:Containers{%s}) RETURN p;\" # Clean points for e in entries: params = ''", "% e['localizacion'] params += 'city: \"%s\",' % e['localidad'] params += 'province: \"%s\",' %", "l.select_one('atributo[nombre=\"LOCALIDAD\"]').text, \"provincia\": 
l.select_one('atributo[nombre=\"PROVINCIA\"]').text, \"codigo_postal\": l.select_one('atributo[nombre=\"CODIGO-POSTAL\"]').text, \"barrio\": l.select_one('atributo[nombre=\"BARRIO\"]').text, \"distrito\": l.select_one('atributo[nombre=\"DISTRITO\"]').text, \"coord_x\": l.select_one('atributo[nombre=\"COORDENADA-X\"]').text, \"coord_y\": l.select_one('atributo[nombre=\"COORDENADA-Y\"]').text,", "'dog_shit_trash', 'battery_recycling_point' ] for t in trash_types: query = \"\"\" MERGE (t:TrashType{name:\"%s\"}) \"\"\"", "shit containers\" % i) # Link clean_point related trash types graph_session.run(\"\"\" MATCH (t:TrashType),", "trash_types: query = \"\"\" MERGE (t:TrashType{name:\"%s\"}) \"\"\" graph_session.run(query % t) # marquesinas list_batteries_container", "e['accesibilidad'] params += 'address: \"%s\",' % e['localizacion'] params += 'city: \"%s\",' % e['localidad']", "+= 'district:\" %s\",' % e['DISTRITO'] params += 'lat: %s,' % e['Latitud'] params +=", "(c:Containers) WHERE t.name in [\"dog_shit\"] AND c.container_type = \"dog_shit_trash\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c", "% (str(i),e['Parada']) params += 'district:\" %s\",' % e['DISTRITO'] params += 'lat: %s,' %", "params += 'entity_id: %s,' % str(e['codigo']) params += 'name: \"%s-%s %s\",' % (str(i),", "\"codigo_postal\": l.select_one('atributo[nombre=\"CODIGO-POSTAL\"]').text, \"barrio\": l.select_one('atributo[nombre=\"BARRIO\"]').text, \"distrito\": l.select_one('atributo[nombre=\"DISTRITO\"]').text, \"coord_x\": l.select_one('atributo[nombre=\"COORDENADA-X\"]').text, \"coord_y\": l.select_one('atributo[nombre=\"COORDENADA-Y\"]').text, \"lat\": l.select_one('atributo[nombre=\"LATITUD\"]').text, \"lon\":", "\"%s\" RETURN n.lat as latitude, n.lon as longitude, n.container_type as container_type, collect(t.name) as", "csv from neo4j.v1 import GraphDatabase from neo4j.v1 import basic_auth from bs4 import BeautifulSoup", "%s %s\" % ( l.select_one('atributo[nombre=\"CLASE-VIAL\"]').text, l.select_one('atributo[nombre=\"NOMBRE-VIA\"]').text, l.select_one('atributo[nombre=\"NUM\"]').text ), \"localidad\": l.select_one('atributo[nombre=\"LOCALIDAD\"]').text, \"provincia\": l.select_one('atributo[nombre=\"PROVINCIA\"]').text, \"codigo_postal\":", "\"%s\",' % e['barrio'] params += 'district:\" %s\",' % e['distrito'] params += 'lat: %s,'", "gdb['user'], gdb['auth'] ) ) graph_session = graph_driver.session() class PuntosLimpiosExtractor(object): file = None puntos_limpios", "graph_driver.session() class PuntosLimpiosExtractor(object): file = None puntos_limpios = [] def __init__(self, file_path): self.file", "params) # Trash types trash_types = ['furniture', 'electronics', 'batteries', 'dog_shit'] container_types = [", "graph_session.run(base_query % params) # Trash types trash_types = ['furniture', 'electronics', 'batteries', 'dog_shit'] container_types", "class PuntosLimpiosExtractor(object): file = None puntos_limpios = [] def __init__(self, file_path): self.file =", "'entity_id: %s,' % e['id_entidad'] params += 'name: \"%s\",' % e['nombre'] params += 'schedule:", "%s,' % e['codigo_postal'] params += 'neighborhood: \"%s\",' % e['barrio'] params += 'district:\" %s\",'", "for t in trash_types: query = \"\"\" MERGE (t:TrashType{name:\"%s\"}) \"\"\" graph_session.run(query % t)", "params += 'entity_id: %s,' % e['id_entidad'] params += 'name: \"%s\",' % e['nombre'] params", "[\"furniture\",\"electronics\",\"batteries\"] AND c.container_type = \"clean_point\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") # Link 
batteries", "% i) list_dog_shit_container = json.load(open('Papeleras_con_expendedor_de_bolsas.json')) for i,e in enumerate(list_dog_shit_container): print(e['latitud'].replace(',','.'), e['longitud'].replace(',','.')) params =", "\"nombre\": l.select_one('atributo[nombre=\"NOMBRE\"]').text, \"horario\": l.select_one('atributo[nombre=\"HORARIO\"]').text, \"transporte\": l.select_one('atributo[nombre=\"TRANSPORTE\"]').text, \"descripcion\": l.select_one('atributo[nombre=\"DESCRIPCION\"]').text, \"accesibilidad\": l.select_one('atributo[nombre=\"ACCESIBILIDAD\"]').text, \"content-url\": l.select_one('atributo[nombre=\"CONTENT-URL\"]').text, \"localizacion\":", "'' params += 'container_type: \"battery_recycling_point\",' params += 'entity_id: %s,' % e['Parada'] params +=", "= graph_driver.session() class PuntosLimpiosExtractor(object): file = None puntos_limpios = [] def __init__(self, file_path):", "try: graph_session.run(base_query % params) except: import pdb; pdb.set_trace() print(\"Saved %d dog shit containers\"", "Link clean_point related trash types graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name in [\"furniture\",\"electronics\",\"batteries\"]", "graph_session.run(base_query % params) except: import pdb; pdb.set_trace() print(\"Saved %d dog shit containers\" %", "l.select_one('atributo[nombre=\"COORDENADA-Y\"]').text, \"lat\": l.select_one('atributo[nombre=\"LATITUD\"]').text, \"lon\": l.select_one('atributo[nombre=\"LONGITUD\"]').text, } ) class GraphModel(object): def __init__(self): print(\"Initialized graph", "+= 'name: \"%s\",' % e['nombre'] params += 'schedule: \"%s\",' % e['horario'] params +=", "\"\"\") def get_distances(self, lat, lon, container_type): query = \"\"\" MATCH p=(n:Containers)<-[r:CAN_BE_DEPLOYED_IN]-(t:TrashType) WHERE n.container_type", "params += 'district:\" %s\",' % e['distrito'] params += 'lat: %s,' % e['lat'] params", "% e['accesibilidad'] params += 'address: \"%s\",' % e['localizacion'] params += 'city: \"%s\",' %", "\"\"\" MERGE (t:TrashType{name:\"%s\"}) \"\"\" graph_session.run(query % t) # marquesinas list_batteries_container = json.load(open('Marquesinas_contenedores_pilas_2017.json')) for", "+= 'lat: %s,' % e['Latitud'] params += 'lon: %s' % e['Longitud'] graph_session.run(base_query %", "\"%s\",' % e['descripcion'] params += 'accesibility: %s,' % e['accesibilidad'] params += 'address: \"%s\",'", "# Clean points for e in entries: params = '' params += 'container_type:", "'entity_id: %s,' % str(e['codigo']) params += 'name: \"%s-%s %s\",' % (str(i), str(e['codigo']), e['direccion'])", "l.select_one('atributo[nombre=\"NUM\"]').text ), \"localidad\": l.select_one('atributo[nombre=\"LOCALIDAD\"]').text, \"provincia\": l.select_one('atributo[nombre=\"PROVINCIA\"]').text, \"codigo_postal\": l.select_one('atributo[nombre=\"CODIGO-POSTAL\"]').text, \"barrio\": l.select_one('atributo[nombre=\"BARRIO\"]').text, \"distrito\": l.select_one('atributo[nombre=\"DISTRITO\"]').text, \"coord_x\":", "), \"localidad\": l.select_one('atributo[nombre=\"LOCALIDAD\"]').text, \"provincia\": l.select_one('atributo[nombre=\"PROVINCIA\"]').text, \"codigo_postal\": l.select_one('atributo[nombre=\"CODIGO-POSTAL\"]').text, \"barrio\": l.select_one('atributo[nombre=\"BARRIO\"]').text, \"distrito\": l.select_one('atributo[nombre=\"DISTRITO\"]').text, \"coord_x\": l.select_one('atributo[nombre=\"COORDENADA-X\"]').text,", "__init__(self): print(\"Initialized graph model\") def fill_model(self): entries = json.load(open('puntos_limpios.json')) base_query = 
\"MERGE (p:Containers{%s})", "Trash types trash_types = ['furniture', 'electronics', 'batteries', 'dog_shit'] container_types = [ 'clean_point', 'dog_shit_trash',", "as point_distance ORDER BY point_distance LIMIT 5 \"\"\" % ( container_type, str(lat), str(lon)", "'clean_point', 'dog_shit_trash', 'battery_recycling_point' ] for t in trash_types: query = \"\"\" MERGE (t:TrashType{name:\"%s\"})", "'container_type: \"dog_shit_trash\",' params += 'entity_id: %s,' % str(e['codigo']) params += 'name: \"%s-%s %s\",'", "'district:\" %s\",' % e['distrito'] params += 'lat: %s,' % e['lat'] params += 'lon:", "from neo4j.v1 import GraphDatabase from neo4j.v1 import basic_auth from bs4 import BeautifulSoup from", "= json.load(open('Papeleras_con_expendedor_de_bolsas.json')) for i,e in enumerate(list_dog_shit_container): print(e['latitud'].replace(',','.'), e['longitud'].replace(',','.')) params = '' params +=", "+= 'lon: %s' % e['lon'] graph_session.run(base_query % params) # Trash types trash_types =", "entries = json.load(open('puntos_limpios.json')) base_query = \"MERGE (p:Containers{%s}) RETURN p;\" # Clean points for", "str(e['latitud'].replace(',','.')) params += 'lon: %s' % str(e['longitud'].replace(',','.')) try: graph_session.run(base_query % params) except: import", "+= 'city: \"%s\",' % e['localidad'] params += 'province: \"%s\",' % e['provincia'] params +=", "'city: \"%s\",' % e['localidad'] params += 'province: \"%s\",' % e['provincia'] params += 'postal_code:", "l in lista_puntos: self.load_punto_limpio( **{ \"id_entidad\": l.select_one('atributo[nombre=\"ID-ENTIDAD\"]').text, \"nombre\": l.select_one('atributo[nombre=\"NOMBRE\"]').text, \"horario\": l.select_one('atributo[nombre=\"HORARIO\"]').text, \"transporte\": l.select_one('atributo[nombre=\"TRANSPORTE\"]').text,", "params += 'address: \"%s\",' % e['localizacion'] params += 'city: \"%s\",' % e['localidad'] params", "(c:Containers) WHERE t.name in [\"furniture\",\"electronics\",\"batteries\"] AND c.container_type = \"clean_point\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c", "params = '' params += 'container_type: \"dog_shit_trash\",' params += 'entity_id: %s,' % str(e['codigo'])", "params += 'district:\" %s\",' % e['DISTRITO'] params += 'lat: %s,' % e['Latitud'] params", "for i, e in enumerate(list_batteries_container): params = '' params += 'container_type: \"battery_recycling_point\",' params", ") ) as point_distance ORDER BY point_distance LIMIT 5 \"\"\" % ( container_type,", ") graph_session = graph_driver.session() class PuntosLimpiosExtractor(object): file = None puntos_limpios = [] def", "'container_type: \"clean_point\",' params += 'entity_id: %s,' % e['id_entidad'] params += 'name: \"%s\",' %", "params += 'container_type: \"clean_point\",' params += 'entity_id: %s,' % e['id_entidad'] params += 'name:", "types trash_types = ['furniture', 'electronics', 'batteries', 'dog_shit'] container_types = [ 'clean_point', 'dog_shit_trash', 'battery_recycling_point'", "\"dog_shit_trash\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") def get_distances(self, lat, lon, container_type): query =", "\"descripcion\": l.select_one('atributo[nombre=\"DESCRIPCION\"]').text, \"accesibilidad\": l.select_one('atributo[nombre=\"ACCESIBILIDAD\"]').text, \"content-url\": l.select_one('atributo[nombre=\"CONTENT-URL\"]').text, \"localizacion\": \"%s %s %s\" % ( l.select_one('atributo[nombre=\"CLASE-VIAL\"]').text,", "%d batteries container\" % i) list_dog_shit_container = json.load(open('Papeleras_con_expendedor_de_bolsas.json')) for i,e in 
enumerate(list_dog_shit_container): print(e['latitud'].replace(',','.'),", "dog shit container with their trash type graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name", ") as point_distance ORDER BY point_distance LIMIT 5 \"\"\" % ( container_type, str(lat),", "in [\"dog_shit\"] AND c.container_type = \"dog_shit_trash\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") def get_distances(self,", "\"dog_shit_trash\",' params += 'entity_id: %s,' % str(e['codigo']) params += 'name: \"%s-%s %s\",' %", "= \"\"\" MERGE (t:TrashType{name:\"%s\"}) \"\"\" graph_session.run(query % t) # marquesinas list_batteries_container = json.load(open('Marquesinas_contenedores_pilas_2017.json'))", "+= 'entity_id: %s,' % str(e['codigo']) params += 'name: \"%s-%s %s\",' % (str(i), str(e['codigo']),", "\"coord_y\": l.select_one('atributo[nombre=\"COORDENADA-Y\"]').text, \"lat\": l.select_one('atributo[nombre=\"LATITUD\"]').text, \"lon\": l.select_one('atributo[nombre=\"LONGITUD\"]').text, } ) class GraphModel(object): def __init__(self): print(\"Initialized", "GraphDatabase.driver( 'bolt://%s:%s' % ( gdb['host'], gdb['port'] ), auth=basic_auth( gdb['user'], gdb['auth'] ) ) graph_session", "**kwargs): self.puntos_limpios.append(kwargs) def read_puntos_limpios_xml(self): b_object = BeautifulSoup(self.file, 'html.parser') lista_puntos = b_object.find_all('contenido') for l", "lista_puntos: self.load_punto_limpio( **{ \"id_entidad\": l.select_one('atributo[nombre=\"ID-ENTIDAD\"]').text, \"nombre\": l.select_one('atributo[nombre=\"NOMBRE\"]').text, \"horario\": l.select_one('atributo[nombre=\"HORARIO\"]').text, \"transporte\": l.select_one('atributo[nombre=\"TRANSPORTE\"]').text, \"descripcion\": l.select_one('atributo[nombre=\"DESCRIPCION\"]').text,", "as trash_types, distance( point( {latitude: n.lat, longitude:n.lon} ), point( {latitude: %s, longitude: %s}", "'description: \"%s\",' % e['descripcion'] params += 'accesibility: %s,' % e['accesibilidad'] params += 'address:", "'neighborhood: \"%s\",' % e['barrio'] params += 'district:\" %s\",' % e['distrito'] params += 'lat:", "l.select_one('atributo[nombre=\"BARRIO\"]').text, \"distrito\": l.select_one('atributo[nombre=\"DISTRITO\"]').text, \"coord_x\": l.select_one('atributo[nombre=\"COORDENADA-X\"]').text, \"coord_y\": l.select_one('atributo[nombre=\"COORDENADA-Y\"]').text, \"lat\": l.select_one('atributo[nombre=\"LATITUD\"]').text, \"lon\": l.select_one('atributo[nombre=\"LONGITUD\"]').text, } )", "print(\"Saved %d dog shit containers\" % i) # Link clean_point related trash types", "% e['transporte'] params += 'description: \"%s\",' % e['descripcion'] params += 'accesibility: %s,' %", "related trash types graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name in [\"furniture\",\"electronics\",\"batteries\"] AND c.container_type", "'batteries', 'dog_shit'] container_types = [ 'clean_point', 'dog_shit_trash', 'battery_recycling_point' ] for t in trash_types:", "\"id_entidad\": l.select_one('atributo[nombre=\"ID-ENTIDAD\"]').text, \"nombre\": l.select_one('atributo[nombre=\"NOMBRE\"]').text, \"horario\": l.select_one('atributo[nombre=\"HORARIO\"]').text, \"transporte\": l.select_one('atributo[nombre=\"TRANSPORTE\"]').text, \"descripcion\": l.select_one('atributo[nombre=\"DESCRIPCION\"]').text, \"accesibilidad\": l.select_one('atributo[nombre=\"ACCESIBILIDAD\"]').text, \"content-url\":", "'dog_shit'] container_types = [ 'clean_point', 'dog_shit_trash', 'battery_recycling_point' ] for t in trash_types: query", "lat, lon, 
container_type): query = \"\"\" MATCH p=(n:Containers)<-[r:CAN_BE_DEPLOYED_IN]-(t:TrashType) WHERE n.container_type = \"%s\" RETURN", "container_type, collect(t.name) as trash_types, distance( point( {latitude: n.lat, longitude:n.lon} ), point( {latitude: %s,", "e['barrio'] params += 'district:\" %s\",' % e['distrito'] params += 'lat: %s,' % e['lat']", "print(\"Initialized graph model\") def fill_model(self): entries = json.load(open('puntos_limpios.json')) base_query = \"MERGE (p:Containers{%s}) RETURN", "\"%s\",' % e['direccion'] params += 'district:\" %s\",' % str(e['distrito']) params += 'lat: %s,'", "neo4j.v1 import basic_auth from bs4 import BeautifulSoup from config import GRAPH_DATABASE as gdb", "str(e['codigo']), e['direccion']) params += 'address: \"%s\",' % e['direccion'] params += 'district:\" %s\",' %", "= \"battery_recycling_point\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") # Link dog shit container with", "in enumerate(list_batteries_container): params = '' params += 'container_type: \"battery_recycling_point\",' params += 'entity_id: %s,'", "PuntosLimpiosExtractor(object): file = None puntos_limpios = [] def __init__(self, file_path): self.file = open(file_path).read()", "distance( point( {latitude: n.lat, longitude:n.lon} ), point( {latitude: %s, longitude: %s} ) )", "\"transporte\": l.select_one('atributo[nombre=\"TRANSPORTE\"]').text, \"descripcion\": l.select_one('atributo[nombre=\"DESCRIPCION\"]').text, \"accesibilidad\": l.select_one('atributo[nombre=\"ACCESIBILIDAD\"]').text, \"content-url\": l.select_one('atributo[nombre=\"CONTENT-URL\"]').text, \"localizacion\": \"%s %s %s\" %", "list_dog_shit_container = json.load(open('Papeleras_con_expendedor_de_bolsas.json')) for i,e in enumerate(list_dog_shit_container): print(e['latitud'].replace(',','.'), e['longitud'].replace(',','.')) params = '' params", "graph_session.run(base_query % params) print(\"Saved %d batteries container\" % i) list_dog_shit_container = json.load(open('Papeleras_con_expendedor_de_bolsas.json')) for", "% e['distrito'] params += 'lat: %s,' % e['lat'] params += 'lon: %s' %", "def __init__(self, file_path): self.file = open(file_path).read() def load_punto_limpio(self, **kwargs): self.puntos_limpios.append(kwargs) def read_puntos_limpios_xml(self): b_object", "\"%s %s %s\" % ( l.select_one('atributo[nombre=\"CLASE-VIAL\"]').text, l.select_one('atributo[nombre=\"NOMBRE-VIA\"]').text, l.select_one('atributo[nombre=\"NUM\"]').text ), \"localidad\": l.select_one('atributo[nombre=\"LOCALIDAD\"]').text, \"provincia\": l.select_one('atributo[nombre=\"PROVINCIA\"]').text,", "[ 'clean_point', 'dog_shit_trash', 'battery_recycling_point' ] for t in trash_types: query = \"\"\" MERGE", "file = None puntos_limpios = [] def __init__(self, file_path): self.file = open(file_path).read() def", "MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") def get_distances(self, lat, lon, container_type): query = \"\"\"", "e['id_entidad'] params += 'name: \"%s\",' % e['nombre'] params += 'schedule: \"%s\",' % e['horario']", "'lat: %s,' % e['lat'] params += 'lon: %s' % e['lon'] graph_session.run(base_query % params)", "json.load(open('Marquesinas_contenedores_pilas_2017.json')) for i, e in enumerate(list_batteries_container): params = '' params += 'container_type: \"battery_recycling_point\",'", "[\"batteries\"] AND c.container_type = \"battery_recycling_point\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") # Link dog", "(t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") # Link dog shit container with 
their trash type", "%s,' % str(e['codigo']) params += 'name: \"%s-%s %s\",' % (str(i), str(e['codigo']), e['direccion']) params", "\"distrito\": l.select_one('atributo[nombre=\"DISTRITO\"]').text, \"coord_x\": l.select_one('atributo[nombre=\"COORDENADA-X\"]').text, \"coord_y\": l.select_one('atributo[nombre=\"COORDENADA-Y\"]').text, \"lat\": l.select_one('atributo[nombre=\"LATITUD\"]').text, \"lon\": l.select_one('atributo[nombre=\"LONGITUD\"]').text, } ) class", "+= 'province: \"%s\",' % e['provincia'] params += 'postal_code: %s,' % e['codigo_postal'] params +=", "params += 'entity_id: %s,' % e['Parada'] params += 'name: \"bus_stop_%s-%s\",' % (str(i),e['Parada']) params", "\"\"\") # Link dog shit container with their trash type graph_session.run(\"\"\" MATCH (t:TrashType),", "% (str(i), str(e['codigo']), e['direccion']) params += 'address: \"%s\",' % e['direccion'] params += 'district:\"", "query = \"\"\" MERGE (t:TrashType{name:\"%s\"}) \"\"\" graph_session.run(query % t) # marquesinas list_batteries_container =", "e['longitud'].replace(',','.')) params = '' params += 'container_type: \"dog_shit_trash\",' params += 'entity_id: %s,' %", "**{ \"id_entidad\": l.select_one('atributo[nombre=\"ID-ENTIDAD\"]').text, \"nombre\": l.select_one('atributo[nombre=\"NOMBRE\"]').text, \"horario\": l.select_one('atributo[nombre=\"HORARIO\"]').text, \"transporte\": l.select_one('atributo[nombre=\"TRANSPORTE\"]').text, \"descripcion\": l.select_one('atributo[nombre=\"DESCRIPCION\"]').text, \"accesibilidad\": l.select_one('atributo[nombre=\"ACCESIBILIDAD\"]').text,", "collect(t.name) as trash_types, distance( point( {latitude: n.lat, longitude:n.lon} ), point( {latitude: %s, longitude:", "in trash_types: query = \"\"\" MERGE (t:TrashType{name:\"%s\"}) \"\"\" graph_session.run(query % t) # marquesinas", "graph model\") def fill_model(self): entries = json.load(open('puntos_limpios.json')) base_query = \"MERGE (p:Containers{%s}) RETURN p;\"", "% e['Parada'] params += 'name: \"bus_stop_%s-%s\",' % (str(i),e['Parada']) params += 'district:\" %s\",' %", "= json.load(open('Marquesinas_contenedores_pilas_2017.json')) for i, e in enumerate(list_batteries_container): params = '' params += 'container_type:", "\"%s\",' % e['horario'] params += 'public_transport: \"%s\",' % e['transporte'] params += 'description: \"%s\",'", "graph_session.run(query % t) # marquesinas list_batteries_container = json.load(open('Marquesinas_contenedores_pilas_2017.json')) for i, e in enumerate(list_batteries_container):", "%s' % e['Longitud'] graph_session.run(base_query % params) print(\"Saved %d batteries container\" % i) list_dog_shit_container", "enumerate(list_batteries_container): params = '' params += 'container_type: \"battery_recycling_point\",' params += 'entity_id: %s,' %", "% e['lat'] params += 'lon: %s' % e['lon'] graph_session.run(base_query % params) # Trash", "e['Latitud'] params += 'lon: %s' % e['Longitud'] graph_session.run(base_query % params) print(\"Saved %d batteries", "'province: \"%s\",' % e['provincia'] params += 'postal_code: %s,' % e['codigo_postal'] params += 'neighborhood:", "[\"dog_shit\"] AND c.container_type = \"dog_shit_trash\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") def get_distances(self, lat,", "'lat: %s,' % str(e['latitud'].replace(',','.')) params += 'lon: %s' % str(e['longitud'].replace(',','.')) try: graph_session.run(base_query %", "e['lat'] params += 'lon: %s' % e['lon'] graph_session.run(base_query % params) # Trash types", "list_batteries_container = 
json.load(open('Marquesinas_contenedores_pilas_2017.json')) for i, e in enumerate(list_batteries_container): params = '' params +=", "params) except: import pdb; pdb.set_trace() print(\"Saved %d dog shit containers\" % i) #", "% e['horario'] params += 'public_transport: \"%s\",' % e['transporte'] params += 'description: \"%s\",' %", "= [] def __init__(self, file_path): self.file = open(file_path).read() def load_punto_limpio(self, **kwargs): self.puntos_limpios.append(kwargs) def", "lista_puntos = b_object.find_all('contenido') for l in lista_puntos: self.load_punto_limpio( **{ \"id_entidad\": l.select_one('atributo[nombre=\"ID-ENTIDAD\"]').text, \"nombre\": l.select_one('atributo[nombre=\"NOMBRE\"]').text,", "RETURN t,c \"\"\") def get_distances(self, lat, lon, container_type): query = \"\"\" MATCH p=(n:Containers)<-[r:CAN_BE_DEPLOYED_IN]-(t:TrashType)", "graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name in [\"dog_shit\"] AND c.container_type = \"dog_shit_trash\" MERGE", "+= 'lat: %s,' % e['lat'] params += 'lon: %s' % e['lon'] graph_session.run(base_query %", "% params) # Trash types trash_types = ['furniture', 'electronics', 'batteries', 'dog_shit'] container_types =", "container_type): query = \"\"\" MATCH p=(n:Containers)<-[r:CAN_BE_DEPLOYED_IN]-(t:TrashType) WHERE n.container_type = \"%s\" RETURN n.lat as", "self.file = open(file_path).read() def load_punto_limpio(self, **kwargs): self.puntos_limpios.append(kwargs) def read_puntos_limpios_xml(self): b_object = BeautifulSoup(self.file, 'html.parser')", "= json.load(open('puntos_limpios.json')) base_query = \"MERGE (p:Containers{%s}) RETURN p;\" # Clean points for e", "e['direccion']) params += 'address: \"%s\",' % e['direccion'] params += 'district:\" %s\",' % str(e['distrito'])", "\"bus_stop_%s-%s\",' % (str(i),e['Parada']) params += 'district:\" %s\",' % e['DISTRITO'] params += 'lat: %s,'", "\"%s-%s %s\",' % (str(i), str(e['codigo']), e['direccion']) params += 'address: \"%s\",' % e['direccion'] params", "Link dog shit container with their trash type graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE", "\"clean_point\",' params += 'entity_id: %s,' % e['id_entidad'] params += 'name: \"%s\",' % e['nombre']", "entries: params = '' params += 'container_type: \"clean_point\",' params += 'entity_id: %s,' %", "% str(e['longitud'].replace(',','.')) try: graph_session.run(base_query % params) except: import pdb; pdb.set_trace() print(\"Saved %d dog", "as longitude, n.container_type as container_type, collect(t.name) as trash_types, distance( point( {latitude: n.lat, longitude:n.lon}", "%s,' % e['id_entidad'] params += 'name: \"%s\",' % e['nombre'] params += 'schedule: \"%s\",'", "in enumerate(list_dog_shit_container): print(e['latitud'].replace(',','.'), e['longitud'].replace(',','.')) params = '' params += 'container_type: \"dog_shit_trash\",' params +=", "None puntos_limpios = [] def __init__(self, file_path): self.file = open(file_path).read() def load_punto_limpio(self, **kwargs):", "%d dog shit containers\" % i) # Link clean_point related trash types graph_session.run(\"\"\"", "+= 'address: \"%s\",' % e['localizacion'] params += 'city: \"%s\",' % e['localidad'] params +=", "latitude, n.lon as longitude, n.container_type as container_type, collect(t.name) as trash_types, distance( point( {latitude:", "params += 'name: \"bus_stop_%s-%s\",' % (str(i),e['Parada']) params += 'district:\" %s\",' % e['DISTRITO'] params", "% str(e['codigo']) params += 'name: \"%s-%s %s\",' % (str(i), str(e['codigo']), 
e['direccion']) params +=", "= GraphDatabase.driver( 'bolt://%s:%s' % ( gdb['host'], gdb['port'] ), auth=basic_auth( gdb['user'], gdb['auth'] ) )", "RETURN t,c \"\"\") # Link dog shit container with their trash type graph_session.run(\"\"\"", "\"%s\",' % e['localizacion'] params += 'city: \"%s\",' % e['localidad'] params += 'province: \"%s\",'", "= ['furniture', 'electronics', 'batteries', 'dog_shit'] container_types = [ 'clean_point', 'dog_shit_trash', 'battery_recycling_point' ] for", "base_query = \"MERGE (p:Containers{%s}) RETURN p;\" # Clean points for e in entries:", "% e['codigo_postal'] params += 'neighborhood: \"%s\",' % e['barrio'] params += 'district:\" %s\",' %", "% ( l.select_one('atributo[nombre=\"CLASE-VIAL\"]').text, l.select_one('atributo[nombre=\"NOMBRE-VIA\"]').text, l.select_one('atributo[nombre=\"NUM\"]').text ), \"localidad\": l.select_one('atributo[nombre=\"LOCALIDAD\"]').text, \"provincia\": l.select_one('atributo[nombre=\"PROVINCIA\"]').text, \"codigo_postal\": l.select_one('atributo[nombre=\"CODIGO-POSTAL\"]').text, \"barrio\":", "p;\" # Clean points for e in entries: params = '' params +=", "\"%s\",' % e['nombre'] params += 'schedule: \"%s\",' % e['horario'] params += 'public_transport: \"%s\",'", "e['codigo_postal'] params += 'neighborhood: \"%s\",' % e['barrio'] params += 'district:\" %s\",' % e['distrito']", "n.container_type = \"%s\" RETURN n.lat as latitude, n.lon as longitude, n.container_type as container_type,", "%s} ) ) as point_distance ORDER BY point_distance LIMIT 5 \"\"\" % (", "RETURN t,c \"\"\") # Link batteries container with their trash type graph_session.run(\"\"\" MATCH", "% e['lon'] graph_session.run(base_query % params) # Trash types trash_types = ['furniture', 'electronics', 'batteries',", "l.select_one('atributo[nombre=\"DESCRIPCION\"]').text, \"accesibilidad\": l.select_one('atributo[nombre=\"ACCESIBILIDAD\"]').text, \"content-url\": l.select_one('atributo[nombre=\"CONTENT-URL\"]').text, \"localizacion\": \"%s %s %s\" % ( l.select_one('atributo[nombre=\"CLASE-VIAL\"]').text, l.select_one('atributo[nombre=\"NOMBRE-VIA\"]').text,", "as container_type, collect(t.name) as trash_types, distance( point( {latitude: n.lat, longitude:n.lon} ), point( {latitude:", "'lat: %s,' % e['Latitud'] params += 'lon: %s' % e['Longitud'] graph_session.run(base_query % params)", "\"MERGE (p:Containers{%s}) RETURN p;\" # Clean points for e in entries: params =", "params += 'lon: %s' % e['lon'] graph_session.run(base_query % params) # Trash types trash_types", "MATCH (t:TrashType), (c:Containers) WHERE t.name in [\"furniture\",\"electronics\",\"batteries\"] AND c.container_type = \"clean_point\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c)", "AND c.container_type = \"battery_recycling_point\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") # Link dog shit", "\"content-url\": l.select_one('atributo[nombre=\"CONTENT-URL\"]').text, \"localizacion\": \"%s %s %s\" % ( l.select_one('atributo[nombre=\"CLASE-VIAL\"]').text, l.select_one('atributo[nombre=\"NOMBRE-VIA\"]').text, l.select_one('atributo[nombre=\"NUM\"]').text ), \"localidad\":", "+= 'name: \"%s-%s %s\",' % (str(i), str(e['codigo']), e['direccion']) params += 'address: \"%s\",' %", "i, e in enumerate(list_batteries_container): params = '' params += 'container_type: \"battery_recycling_point\",' params +=", "'container_type: \"battery_recycling_point\",' params += 'entity_id: %s,' % e['Parada'] params += 'name: \"bus_stop_%s-%s\",' %", "l.select_one('atributo[nombre=\"CONTENT-URL\"]').text, \"localizacion\": 
\"%s %s %s\" % ( l.select_one('atributo[nombre=\"CLASE-VIAL\"]').text, l.select_one('atributo[nombre=\"NOMBRE-VIA\"]').text, l.select_one('atributo[nombre=\"NUM\"]').text ), \"localidad\": l.select_one('atributo[nombre=\"LOCALIDAD\"]').text,", "basic_auth from bs4 import BeautifulSoup from config import GRAPH_DATABASE as gdb graph_driver =", "\"localidad\": l.select_one('atributo[nombre=\"LOCALIDAD\"]').text, \"provincia\": l.select_one('atributo[nombre=\"PROVINCIA\"]').text, \"codigo_postal\": l.select_one('atributo[nombre=\"CODIGO-POSTAL\"]').text, \"barrio\": l.select_one('atributo[nombre=\"BARRIO\"]').text, \"distrito\": l.select_one('atributo[nombre=\"DISTRITO\"]').text, \"coord_x\": l.select_one('atributo[nombre=\"COORDENADA-X\"]').text, \"coord_y\":", "+= 'lon: %s' % e['Longitud'] graph_session.run(base_query % params) print(\"Saved %d batteries container\" %", "params += 'city: \"%s\",' % e['localidad'] params += 'province: \"%s\",' % e['provincia'] params", "(t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") # Link batteries container with their trash type graph_session.run(\"\"\"", "(t:TrashType{name:\"%s\"}) \"\"\" graph_session.run(query % t) # marquesinas list_batteries_container = json.load(open('Marquesinas_contenedores_pilas_2017.json')) for i, e", "= b_object.find_all('contenido') for l in lista_puntos: self.load_punto_limpio( **{ \"id_entidad\": l.select_one('atributo[nombre=\"ID-ENTIDAD\"]').text, \"nombre\": l.select_one('atributo[nombre=\"NOMBRE\"]').text, \"horario\":", "'lon: %s' % e['lon'] graph_session.run(base_query % params) # Trash types trash_types = ['furniture',", "'postal_code: %s,' % e['codigo_postal'] params += 'neighborhood: \"%s\",' % e['barrio'] params += 'district:\"", "str(e['longitud'].replace(',','.')) try: graph_session.run(base_query % params) except: import pdb; pdb.set_trace() print(\"Saved %d dog shit", "'html.parser') lista_puntos = b_object.find_all('contenido') for l in lista_puntos: self.load_punto_limpio( **{ \"id_entidad\": l.select_one('atributo[nombre=\"ID-ENTIDAD\"]').text, \"nombre\":", "in lista_puntos: self.load_punto_limpio( **{ \"id_entidad\": l.select_one('atributo[nombre=\"ID-ENTIDAD\"]').text, \"nombre\": l.select_one('atributo[nombre=\"NOMBRE\"]').text, \"horario\": l.select_one('atributo[nombre=\"HORARIO\"]').text, \"transporte\": l.select_one('atributo[nombre=\"TRANSPORTE\"]').text, \"descripcion\":", "= \"dog_shit_trash\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") def get_distances(self, lat, lon, container_type): query", "longitude, n.container_type as container_type, collect(t.name) as trash_types, distance( point( {latitude: n.lat, longitude:n.lon} ),", "points for e in entries: params = '' params += 'container_type: \"clean_point\",' params", "= BeautifulSoup(self.file, 'html.parser') lista_puntos = b_object.find_all('contenido') for l in lista_puntos: self.load_punto_limpio( **{ \"id_entidad\":", "'public_transport: \"%s\",' % e['transporte'] params += 'description: \"%s\",' % e['descripcion'] params += 'accesibility:", "from config import GRAPH_DATABASE as gdb graph_driver = GraphDatabase.driver( 'bolt://%s:%s' % ( gdb['host'],", "def read_puntos_limpios_xml(self): b_object = BeautifulSoup(self.file, 'html.parser') lista_puntos = b_object.find_all('contenido') for l in lista_puntos:", "graph_session = graph_driver.session() class PuntosLimpiosExtractor(object): file = None puntos_limpios = [] def __init__(self,", "\"battery_recycling_point\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") # Link 
dog shit container with their", "c.container_type = \"clean_point\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") # Link batteries container with", "'district:\" %s\",' % str(e['distrito']) params += 'lat: %s,' % str(e['latitud'].replace(',','.')) params += 'lon:", "e['horario'] params += 'public_transport: \"%s\",' % e['transporte'] params += 'description: \"%s\",' % e['descripcion']", "for i,e in enumerate(list_dog_shit_container): print(e['latitud'].replace(',','.'), e['longitud'].replace(',','.')) params = '' params += 'container_type: \"dog_shit_trash\",'", "+= 'public_transport: \"%s\",' % e['transporte'] params += 'description: \"%s\",' % e['descripcion'] params +=", "open(file_path).read() def load_punto_limpio(self, **kwargs): self.puntos_limpios.append(kwargs) def read_puntos_limpios_xml(self): b_object = BeautifulSoup(self.file, 'html.parser') lista_puntos =", "%s\" % ( l.select_one('atributo[nombre=\"CLASE-VIAL\"]').text, l.select_one('atributo[nombre=\"NOMBRE-VIA\"]').text, l.select_one('atributo[nombre=\"NUM\"]').text ), \"localidad\": l.select_one('atributo[nombre=\"LOCALIDAD\"]').text, \"provincia\": l.select_one('atributo[nombre=\"PROVINCIA\"]').text, \"codigo_postal\": l.select_one('atributo[nombre=\"CODIGO-POSTAL\"]').text,", "% i) # Link clean_point related trash types graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE", "+= 'entity_id: %s,' % e['Parada'] params += 'name: \"bus_stop_%s-%s\",' % (str(i),e['Parada']) params +=", "% e['barrio'] params += 'district:\" %s\",' % e['distrito'] params += 'lat: %s,' %", "'battery_recycling_point' ] for t in trash_types: query = \"\"\" MERGE (t:TrashType{name:\"%s\"}) \"\"\" graph_session.run(query", "t in trash_types: query = \"\"\" MERGE (t:TrashType{name:\"%s\"}) \"\"\" graph_session.run(query % t) #", "class GraphModel(object): def __init__(self): print(\"Initialized graph model\") def fill_model(self): entries = json.load(open('puntos_limpios.json')) base_query", "'name: \"%s\",' % e['nombre'] params += 'schedule: \"%s\",' % e['horario'] params += 'public_transport:", "shit container with their trash type graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name in", "= '' params += 'container_type: \"clean_point\",' params += 'entity_id: %s,' % e['id_entidad'] params", "l.select_one('atributo[nombre=\"ACCESIBILIDAD\"]').text, \"content-url\": l.select_one('atributo[nombre=\"CONTENT-URL\"]').text, \"localizacion\": \"%s %s %s\" % ( l.select_one('atributo[nombre=\"CLASE-VIAL\"]').text, l.select_one('atributo[nombre=\"NOMBRE-VIA\"]').text, l.select_one('atributo[nombre=\"NUM\"]').text ),", "\"coord_x\": l.select_one('atributo[nombre=\"COORDENADA-X\"]').text, \"coord_y\": l.select_one('atributo[nombre=\"COORDENADA-Y\"]').text, \"lat\": l.select_one('atributo[nombre=\"LATITUD\"]').text, \"lon\": l.select_one('atributo[nombre=\"LONGITUD\"]').text, } ) class GraphModel(object): def", "+= 'accesibility: %s,' % e['accesibilidad'] params += 'address: \"%s\",' % e['localizacion'] params +=", "'bolt://%s:%s' % ( gdb['host'], gdb['port'] ), auth=basic_auth( gdb['user'], gdb['auth'] ) ) graph_session =", "+= 'lat: %s,' % str(e['latitud'].replace(',','.')) params += 'lon: %s' % str(e['longitud'].replace(',','.')) try: graph_session.run(base_query", "# Link clean_point related trash types graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name in", "t.name in [\"furniture\",\"electronics\",\"batteries\"] AND c.container_type = \"clean_point\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) 
RETURN t,c \"\"\") #", "(t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") def get_distances(self, lat, lon, container_type): query = \"\"\" MATCH", "%s,' % str(e['latitud'].replace(',','.')) params += 'lon: %s' % str(e['longitud'].replace(',','.')) try: graph_session.run(base_query % params)", "= \"\"\" MATCH p=(n:Containers)<-[r:CAN_BE_DEPLOYED_IN]-(t:TrashType) WHERE n.container_type = \"%s\" RETURN n.lat as latitude, n.lon", "\"\"\") # Link batteries container with their trash type graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers)", "params) print(\"Saved %d batteries container\" % i) list_dog_shit_container = json.load(open('Papeleras_con_expendedor_de_bolsas.json')) for i,e in", "l.select_one('atributo[nombre=\"ID-ENTIDAD\"]').text, \"nombre\": l.select_one('atributo[nombre=\"NOMBRE\"]').text, \"horario\": l.select_one('atributo[nombre=\"HORARIO\"]').text, \"transporte\": l.select_one('atributo[nombre=\"TRANSPORTE\"]').text, \"descripcion\": l.select_one('atributo[nombre=\"DESCRIPCION\"]').text, \"accesibilidad\": l.select_one('atributo[nombre=\"ACCESIBILIDAD\"]').text, \"content-url\": l.select_one('atributo[nombre=\"CONTENT-URL\"]').text,", "import csv from neo4j.v1 import GraphDatabase from neo4j.v1 import basic_auth from bs4 import", "\"clean_point\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") # Link batteries container with their trash", "container_types = [ 'clean_point', 'dog_shit_trash', 'battery_recycling_point' ] for t in trash_types: query =", "graph_driver = GraphDatabase.driver( 'bolt://%s:%s' % ( gdb['host'], gdb['port'] ), auth=basic_auth( gdb['user'], gdb['auth'] )", "def __init__(self): print(\"Initialized graph model\") def fill_model(self): entries = json.load(open('puntos_limpios.json')) base_query = \"MERGE", "% e['provincia'] params += 'postal_code: %s,' % e['codigo_postal'] params += 'neighborhood: \"%s\",' %", "+= 'neighborhood: \"%s\",' % e['barrio'] params += 'district:\" %s\",' % e['distrito'] params +=", "clean_point related trash types graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name in [\"furniture\",\"electronics\",\"batteries\"] AND", "% e['descripcion'] params += 'accesibility: %s,' % e['accesibilidad'] params += 'address: \"%s\",' %", "point( {latitude: %s, longitude: %s} ) ) as point_distance ORDER BY point_distance LIMIT", "%s\",' % (str(i), str(e['codigo']), e['direccion']) params += 'address: \"%s\",' % e['direccion'] params +=", "\"localizacion\": \"%s %s %s\" % ( l.select_one('atributo[nombre=\"CLASE-VIAL\"]').text, l.select_one('atributo[nombre=\"NOMBRE-VIA\"]').text, l.select_one('atributo[nombre=\"NUM\"]').text ), \"localidad\": l.select_one('atributo[nombre=\"LOCALIDAD\"]').text, \"provincia\":", "'' params += 'container_type: \"dog_shit_trash\",' params += 'entity_id: %s,' % str(e['codigo']) params +=", "params += 'address: \"%s\",' % e['direccion'] params += 'district:\" %s\",' % str(e['distrito']) params", "json import csv from neo4j.v1 import GraphDatabase from neo4j.v1 import basic_auth from bs4", "%s,' % e['Latitud'] params += 'lon: %s' % e['Longitud'] graph_session.run(base_query % params) print(\"Saved", "l.select_one('atributo[nombre=\"PROVINCIA\"]').text, \"codigo_postal\": l.select_one('atributo[nombre=\"CODIGO-POSTAL\"]').text, \"barrio\": l.select_one('atributo[nombre=\"BARRIO\"]').text, \"distrito\": l.select_one('atributo[nombre=\"DISTRITO\"]').text, \"coord_x\": l.select_one('atributo[nombre=\"COORDENADA-X\"]').text, \"coord_y\": 
l.select_one('atributo[nombre=\"COORDENADA-Y\"]').text, \"lat\": l.select_one('atributo[nombre=\"LATITUD\"]').text,", "enumerate(list_dog_shit_container): print(e['latitud'].replace(',','.'), e['longitud'].replace(',','.')) params = '' params += 'container_type: \"dog_shit_trash\",' params += 'entity_id:", "gdb['auth'] ) ) graph_session = graph_driver.session() class PuntosLimpiosExtractor(object): file = None puntos_limpios =", "i,e in enumerate(list_dog_shit_container): print(e['latitud'].replace(',','.'), e['longitud'].replace(',','.')) params = '' params += 'container_type: \"dog_shit_trash\",' params", "t,c \"\"\") # Link dog shit container with their trash type graph_session.run(\"\"\" MATCH", "\"%s\",' % e['localidad'] params += 'province: \"%s\",' % e['provincia'] params += 'postal_code: %s,'", "%s, longitude: %s} ) ) as point_distance ORDER BY point_distance LIMIT 5 \"\"\"", "% ( gdb['host'], gdb['port'] ), auth=basic_auth( gdb['user'], gdb['auth'] ) ) graph_session = graph_driver.session()", "ORDER BY point_distance LIMIT 5 \"\"\" % ( container_type, str(lat), str(lon) ) return", "\"accesibilidad\": l.select_one('atributo[nombre=\"ACCESIBILIDAD\"]').text, \"content-url\": l.select_one('atributo[nombre=\"CONTENT-URL\"]').text, \"localizacion\": \"%s %s %s\" % ( l.select_one('atributo[nombre=\"CLASE-VIAL\"]').text, l.select_one('atributo[nombre=\"NOMBRE-VIA\"]').text, l.select_one('atributo[nombre=\"NUM\"]').text", "auth=basic_auth( gdb['user'], gdb['auth'] ) ) graph_session = graph_driver.session() class PuntosLimpiosExtractor(object): file = None", "\"lon\": l.select_one('atributo[nombre=\"LONGITUD\"]').text, } ) class GraphModel(object): def __init__(self): print(\"Initialized graph model\") def fill_model(self):", "% params) except: import pdb; pdb.set_trace() print(\"Saved %d dog shit containers\" % i)", "def load_punto_limpio(self, **kwargs): self.puntos_limpios.append(kwargs) def read_puntos_limpios_xml(self): b_object = BeautifulSoup(self.file, 'html.parser') lista_puntos = b_object.find_all('contenido')", "e in entries: params = '' params += 'container_type: \"clean_point\",' params += 'entity_id:", "their trash type graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name in [\"batteries\"] AND c.container_type", "), point( {latitude: %s, longitude: %s} ) ) as point_distance ORDER BY point_distance", "\"%s\",' % e['provincia'] params += 'postal_code: %s,' % e['codigo_postal'] params += 'neighborhood: \"%s\",'", "% e['DISTRITO'] params += 'lat: %s,' % e['Latitud'] params += 'lon: %s' %", "trash_types, distance( point( {latitude: n.lat, longitude:n.lon} ), point( {latitude: %s, longitude: %s} )", "params += 'schedule: \"%s\",' % e['horario'] params += 'public_transport: \"%s\",' % e['transporte'] params", "% e['Latitud'] params += 'lon: %s' % e['Longitud'] graph_session.run(base_query % params) print(\"Saved %d", "params += 'lat: %s,' % str(e['latitud'].replace(',','.')) params += 'lon: %s' % str(e['longitud'].replace(',','.')) try:", "i) # Link clean_point related trash types graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name", "%s' % e['lon'] graph_session.run(base_query % params) # Trash types trash_types = ['furniture', 'electronics',", "MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") # Link dog shit container with their trash", "Clean points for e in entries: params = '' params += 'container_type: \"clean_point\",'", "% str(e['distrito']) params += 'lat: %s,' % str(e['latitud'].replace(',','.')) params += 'lon: %s' %", 
"'name: \"%s-%s %s\",' % (str(i), str(e['codigo']), e['direccion']) params += 'address: \"%s\",' % e['direccion']", "MATCH p=(n:Containers)<-[r:CAN_BE_DEPLOYED_IN]-(t:TrashType) WHERE n.container_type = \"%s\" RETURN n.lat as latitude, n.lon as longitude,", "\"horario\": l.select_one('atributo[nombre=\"HORARIO\"]').text, \"transporte\": l.select_one('atributo[nombre=\"TRANSPORTE\"]').text, \"descripcion\": l.select_one('atributo[nombre=\"DESCRIPCION\"]').text, \"accesibilidad\": l.select_one('atributo[nombre=\"ACCESIBILIDAD\"]').text, \"content-url\": l.select_one('atributo[nombre=\"CONTENT-URL\"]').text, \"localizacion\": \"%s %s", "from neo4j.v1 import basic_auth from bs4 import BeautifulSoup from config import GRAPH_DATABASE as", "\"provincia\": l.select_one('atributo[nombre=\"PROVINCIA\"]').text, \"codigo_postal\": l.select_one('atributo[nombre=\"CODIGO-POSTAL\"]').text, \"barrio\": l.select_one('atributo[nombre=\"BARRIO\"]').text, \"distrito\": l.select_one('atributo[nombre=\"DISTRITO\"]').text, \"coord_x\": l.select_one('atributo[nombre=\"COORDENADA-X\"]').text, \"coord_y\": l.select_one('atributo[nombre=\"COORDENADA-Y\"]').text, \"lat\":", "'lon: %s' % str(e['longitud'].replace(',','.')) try: graph_session.run(base_query % params) except: import pdb; pdb.set_trace() print(\"Saved", "] for t in trash_types: query = \"\"\" MERGE (t:TrashType{name:\"%s\"}) \"\"\" graph_session.run(query %", "from bs4 import BeautifulSoup from config import GRAPH_DATABASE as gdb graph_driver = GraphDatabase.driver(", "e['localidad'] params += 'province: \"%s\",' % e['provincia'] params += 'postal_code: %s,' % e['codigo_postal']", "params += 'lat: %s,' % e['Latitud'] params += 'lon: %s' % e['Longitud'] graph_session.run(base_query", "+= 'description: \"%s\",' % e['descripcion'] params += 'accesibility: %s,' % e['accesibilidad'] params +=", "params += 'name: \"%s-%s %s\",' % (str(i), str(e['codigo']), e['direccion']) params += 'address: \"%s\",'", "t) # marquesinas list_batteries_container = json.load(open('Marquesinas_contenedores_pilas_2017.json')) for i, e in enumerate(list_batteries_container): params =", "puntos_limpios = [] def __init__(self, file_path): self.file = open(file_path).read() def load_punto_limpio(self, **kwargs): self.puntos_limpios.append(kwargs)", "(str(i), str(e['codigo']), e['direccion']) params += 'address: \"%s\",' % e['direccion'] params += 'district:\" %s\",'", "\"%s\",' % e['transporte'] params += 'description: \"%s\",' % e['descripcion'] params += 'accesibility: %s,'", "params += 'container_type: \"dog_shit_trash\",' params += 'entity_id: %s,' % str(e['codigo']) params += 'name:", "types graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name in [\"furniture\",\"electronics\",\"batteries\"] AND c.container_type = \"clean_point\"", "+= 'schedule: \"%s\",' % e['horario'] params += 'public_transport: \"%s\",' % e['transporte'] params +=", "(t:TrashType), (c:Containers) WHERE t.name in [\"furniture\",\"electronics\",\"batteries\"] AND c.container_type = \"clean_point\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN", "params += 'lat: %s,' % e['lat'] params += 'lon: %s' % e['lon'] graph_session.run(base_query", "gdb['host'], gdb['port'] ), auth=basic_auth( gdb['user'], gdb['auth'] ) ) graph_session = graph_driver.session() class PuntosLimpiosExtractor(object):", "GraphDatabase from neo4j.v1 import basic_auth from bs4 import BeautifulSoup from config import GRAPH_DATABASE", "GraphModel(object): def __init__(self): print(\"Initialized graph model\") def 
fill_model(self): entries = json.load(open('puntos_limpios.json')) base_query =", "neo4j.v1 import GraphDatabase from neo4j.v1 import basic_auth from bs4 import BeautifulSoup from config", "t,c \"\"\") def get_distances(self, lat, lon, container_type): query = \"\"\" MATCH p=(n:Containers)<-[r:CAN_BE_DEPLOYED_IN]-(t:TrashType) WHERE", "%s,' % e['Parada'] params += 'name: \"bus_stop_%s-%s\",' % (str(i),e['Parada']) params += 'district:\" %s\",'", "trash_types = ['furniture', 'electronics', 'batteries', 'dog_shit'] container_types = [ 'clean_point', 'dog_shit_trash', 'battery_recycling_point' ]", "\"\"\" MATCH p=(n:Containers)<-[r:CAN_BE_DEPLOYED_IN]-(t:TrashType) WHERE n.container_type = \"%s\" RETURN n.lat as latitude, n.lon as", "# marquesinas list_batteries_container = json.load(open('Marquesinas_contenedores_pilas_2017.json')) for i, e in enumerate(list_batteries_container): params = ''", "BY point_distance LIMIT 5 \"\"\" % ( container_type, str(lat), str(lon) ) return graph_session.run(query)", ") ) graph_session = graph_driver.session() class PuntosLimpiosExtractor(object): file = None puntos_limpios = []", "+= 'container_type: \"clean_point\",' params += 'entity_id: %s,' % e['id_entidad'] params += 'name: \"%s\",'", "self.load_punto_limpio( **{ \"id_entidad\": l.select_one('atributo[nombre=\"ID-ENTIDAD\"]').text, \"nombre\": l.select_one('atributo[nombre=\"NOMBRE\"]').text, \"horario\": l.select_one('atributo[nombre=\"HORARIO\"]').text, \"transporte\": l.select_one('atributo[nombre=\"TRANSPORTE\"]').text, \"descripcion\": l.select_one('atributo[nombre=\"DESCRIPCION\"]').text, \"accesibilidad\":", "'address: \"%s\",' % e['direccion'] params += 'district:\" %s\",' % str(e['distrito']) params += 'lat:", "= open(file_path).read() def load_punto_limpio(self, **kwargs): self.puntos_limpios.append(kwargs) def read_puntos_limpios_xml(self): b_object = BeautifulSoup(self.file, 'html.parser') lista_puntos", "import pdb; pdb.set_trace() print(\"Saved %d dog shit containers\" % i) # Link clean_point", "gdb['port'] ), auth=basic_auth( gdb['user'], gdb['auth'] ) ) graph_session = graph_driver.session() class PuntosLimpiosExtractor(object): file", "e['Longitud'] graph_session.run(base_query % params) print(\"Saved %d batteries container\" % i) list_dog_shit_container = json.load(open('Papeleras_con_expendedor_de_bolsas.json'))", "__init__(self, file_path): self.file = open(file_path).read() def load_punto_limpio(self, **kwargs): self.puntos_limpios.append(kwargs) def read_puntos_limpios_xml(self): b_object =", "n.lat, longitude:n.lon} ), point( {latitude: %s, longitude: %s} ) ) as point_distance ORDER", "RETURN p;\" # Clean points for e in entries: params = '' params", "\"\"\" graph_session.run(query % t) # marquesinas list_batteries_container = json.load(open('Marquesinas_contenedores_pilas_2017.json')) for i, e in", "'district:\" %s\",' % e['DISTRITO'] params += 'lat: %s,' % e['Latitud'] params += 'lon:", "container with their trash type graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name in [\"batteries\"]", "c.container_type = \"dog_shit_trash\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") def get_distances(self, lat, lon, container_type):", "% e['nombre'] params += 'schedule: \"%s\",' % e['horario'] params += 'public_transport: \"%s\",' %", "l.select_one('atributo[nombre=\"LONGITUD\"]').text, } ) class GraphModel(object): def __init__(self): print(\"Initialized graph model\") def fill_model(self): entries", "params = '' params += 'container_type: 
\"clean_point\",' params += 'entity_id: %s,' % e['id_entidad']", "except: import pdb; pdb.set_trace() print(\"Saved %d dog shit containers\" % i) # Link", "\"barrio\": l.select_one('atributo[nombre=\"BARRIO\"]').text, \"distrito\": l.select_one('atributo[nombre=\"DISTRITO\"]').text, \"coord_x\": l.select_one('atributo[nombre=\"COORDENADA-X\"]').text, \"coord_y\": l.select_one('atributo[nombre=\"COORDENADA-Y\"]').text, \"lat\": l.select_one('atributo[nombre=\"LATITUD\"]').text, \"lon\": l.select_one('atributo[nombre=\"LONGITUD\"]').text, }", "+= 'district:\" %s\",' % e['distrito'] params += 'lat: %s,' % e['lat'] params +=", "pdb; pdb.set_trace() print(\"Saved %d dog shit containers\" % i) # Link clean_point related", "trash type graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name in [\"batteries\"] AND c.container_type =", "marquesinas list_batteries_container = json.load(open('Marquesinas_contenedores_pilas_2017.json')) for i, e in enumerate(list_batteries_container): params = '' params", "= \"%s\" RETURN n.lat as latitude, n.lon as longitude, n.container_type as container_type, collect(t.name)", "RETURN n.lat as latitude, n.lon as longitude, n.container_type as container_type, collect(t.name) as trash_types,", "e['nombre'] params += 'schedule: \"%s\",' % e['horario'] params += 'public_transport: \"%s\",' % e['transporte']", "e['descripcion'] params += 'accesibility: %s,' % e['accesibilidad'] params += 'address: \"%s\",' % e['localizacion']", "'accesibility: %s,' % e['accesibilidad'] params += 'address: \"%s\",' % e['localizacion'] params += 'city:", "params = '' params += 'container_type: \"battery_recycling_point\",' params += 'entity_id: %s,' % e['Parada']", "params += 'public_transport: \"%s\",' % e['transporte'] params += 'description: \"%s\",' % e['descripcion'] params", "+= 'container_type: \"dog_shit_trash\",' params += 'entity_id: %s,' % str(e['codigo']) params += 'name: \"%s-%s", "containers\" % i) # Link clean_point related trash types graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers)", "query = \"\"\" MATCH p=(n:Containers)<-[r:CAN_BE_DEPLOYED_IN]-(t:TrashType) WHERE n.container_type = \"%s\" RETURN n.lat as latitude,", "longitude:n.lon} ), point( {latitude: %s, longitude: %s} ) ) as point_distance ORDER BY", "params += 'container_type: \"battery_recycling_point\",' params += 'entity_id: %s,' % e['Parada'] params += 'name:", "n.container_type as container_type, collect(t.name) as trash_types, distance( point( {latitude: n.lat, longitude:n.lon} ), point(", "<reponame>jasonjimnz/dondelotiro-api import json import csv from neo4j.v1 import GraphDatabase from neo4j.v1 import basic_auth", "['furniture', 'electronics', 'batteries', 'dog_shit'] container_types = [ 'clean_point', 'dog_shit_trash', 'battery_recycling_point' ] for t", "params += 'description: \"%s\",' % e['descripcion'] params += 'accesibility: %s,' % e['accesibilidad'] params", "lon, container_type): query = \"\"\" MATCH p=(n:Containers)<-[r:CAN_BE_DEPLOYED_IN]-(t:TrashType) WHERE n.container_type = \"%s\" RETURN n.lat", "l.select_one('atributo[nombre=\"LATITUD\"]').text, \"lon\": l.select_one('atributo[nombre=\"LONGITUD\"]').text, } ) class GraphModel(object): def __init__(self): print(\"Initialized graph model\") def", "% params) print(\"Saved %d batteries container\" % i) list_dog_shit_container = json.load(open('Papeleras_con_expendedor_de_bolsas.json')) for i,e", "e in enumerate(list_batteries_container): params = '' params += 'container_type: \"battery_recycling_point\",' params += 
'entity_id:", "Link batteries container with their trash type graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name", "t.name in [\"batteries\"] AND c.container_type = \"battery_recycling_point\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") #", "[] def __init__(self, file_path): self.file = open(file_path).read() def load_punto_limpio(self, **kwargs): self.puntos_limpios.append(kwargs) def read_puntos_limpios_xml(self):", "'electronics', 'batteries', 'dog_shit'] container_types = [ 'clean_point', 'dog_shit_trash', 'battery_recycling_point' ] for t in", "import basic_auth from bs4 import BeautifulSoup from config import GRAPH_DATABASE as gdb graph_driver", "% e['direccion'] params += 'district:\" %s\",' % str(e['distrito']) params += 'lat: %s,' %", "MATCH (t:TrashType), (c:Containers) WHERE t.name in [\"batteries\"] AND c.container_type = \"battery_recycling_point\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c)", "params += 'district:\" %s\",' % str(e['distrito']) params += 'lat: %s,' % str(e['latitud'].replace(',','.')) params", "(c:Containers) WHERE t.name in [\"batteries\"] AND c.container_type = \"battery_recycling_point\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c", "point_distance ORDER BY point_distance LIMIT 5 \"\"\" % ( container_type, str(lat), str(lon) )", "% e['Longitud'] graph_session.run(base_query % params) print(\"Saved %d batteries container\" % i) list_dog_shit_container =", "BeautifulSoup(self.file, 'html.parser') lista_puntos = b_object.find_all('contenido') for l in lista_puntos: self.load_punto_limpio( **{ \"id_entidad\": l.select_one('atributo[nombre=\"ID-ENTIDAD\"]').text,", "import GraphDatabase from neo4j.v1 import basic_auth from bs4 import BeautifulSoup from config import", "%s' % str(e['longitud'].replace(',','.')) try: graph_session.run(base_query % params) except: import pdb; pdb.set_trace() print(\"Saved %d", "params += 'lon: %s' % str(e['longitud'].replace(',','.')) try: graph_session.run(base_query % params) except: import pdb;", "file_path): self.file = open(file_path).read() def load_punto_limpio(self, **kwargs): self.puntos_limpios.append(kwargs) def read_puntos_limpios_xml(self): b_object = BeautifulSoup(self.file,", "in [\"furniture\",\"electronics\",\"batteries\"] AND c.container_type = \"clean_point\" MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") # Link", "+= 'postal_code: %s,' % e['codigo_postal'] params += 'neighborhood: \"%s\",' % e['barrio'] params +=", "with their trash type graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name in [\"dog_shit\"] AND", "l.select_one('atributo[nombre=\"HORARIO\"]').text, \"transporte\": l.select_one('atributo[nombre=\"TRANSPORTE\"]').text, \"descripcion\": l.select_one('atributo[nombre=\"DESCRIPCION\"]').text, \"accesibilidad\": l.select_one('atributo[nombre=\"ACCESIBILIDAD\"]').text, \"content-url\": l.select_one('atributo[nombre=\"CONTENT-URL\"]').text, \"localizacion\": \"%s %s %s\"", "params += 'province: \"%s\",' % e['provincia'] params += 'postal_code: %s,' % e['codigo_postal'] params", "trash types graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name in [\"furniture\",\"electronics\",\"batteries\"] AND c.container_type =", "{latitude: n.lat, longitude:n.lon} ), point( {latitude: %s, longitude: %s} ) ) as point_distance", "graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name in [\"batteries\"] AND c.container_type = \"battery_recycling_point\" MERGE", "container\" % i) list_dog_shit_container = 
json.load(open('Papeleras_con_expendedor_de_bolsas.json')) for i,e in enumerate(list_dog_shit_container): print(e['latitud'].replace(',','.'), e['longitud'].replace(',','.')) params", "+= 'lon: %s' % str(e['longitud'].replace(',','.')) try: graph_session.run(base_query % params) except: import pdb; pdb.set_trace()", "import BeautifulSoup from config import GRAPH_DATABASE as gdb graph_driver = GraphDatabase.driver( 'bolt://%s:%s' %", "str(e['distrito']) params += 'lat: %s,' % str(e['latitud'].replace(',','.')) params += 'lon: %s' % str(e['longitud'].replace(',','.'))", "e['lon'] graph_session.run(base_query % params) # Trash types trash_types = ['furniture', 'electronics', 'batteries', 'dog_shit']", "params += 'name: \"%s\",' % e['nombre'] params += 'schedule: \"%s\",' % e['horario'] params", "params += 'accesibility: %s,' % e['accesibilidad'] params += 'address: \"%s\",' % e['localizacion'] params", "load_punto_limpio(self, **kwargs): self.puntos_limpios.append(kwargs) def read_puntos_limpios_xml(self): b_object = BeautifulSoup(self.file, 'html.parser') lista_puntos = b_object.find_all('contenido') for", "params += 'lon: %s' % e['Longitud'] graph_session.run(base_query % params) print(\"Saved %d batteries container\"", "l.select_one('atributo[nombre=\"NOMBRE-VIA\"]').text, l.select_one('atributo[nombre=\"NUM\"]').text ), \"localidad\": l.select_one('atributo[nombre=\"LOCALIDAD\"]').text, \"provincia\": l.select_one('atributo[nombre=\"PROVINCIA\"]').text, \"codigo_postal\": l.select_one('atributo[nombre=\"CODIGO-POSTAL\"]').text, \"barrio\": l.select_one('atributo[nombre=\"BARRIO\"]').text, \"distrito\": l.select_one('atributo[nombre=\"DISTRITO\"]').text,", "BeautifulSoup from config import GRAPH_DATABASE as gdb graph_driver = GraphDatabase.driver( 'bolt://%s:%s' % (", "# Link batteries container with their trash type graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE", "\"battery_recycling_point\",' params += 'entity_id: %s,' % e['Parada'] params += 'name: \"bus_stop_%s-%s\",' % (str(i),e['Parada'])", "json.load(open('Papeleras_con_expendedor_de_bolsas.json')) for i,e in enumerate(list_dog_shit_container): print(e['latitud'].replace(',','.'), e['longitud'].replace(',','.')) params = '' params += 'container_type:", "l.select_one('atributo[nombre=\"CODIGO-POSTAL\"]').text, \"barrio\": l.select_one('atributo[nombre=\"BARRIO\"]').text, \"distrito\": l.select_one('atributo[nombre=\"DISTRITO\"]').text, \"coord_x\": l.select_one('atributo[nombre=\"COORDENADA-X\"]').text, \"coord_y\": l.select_one('atributo[nombre=\"COORDENADA-Y\"]').text, \"lat\": l.select_one('atributo[nombre=\"LATITUD\"]').text, \"lon\": l.select_one('atributo[nombre=\"LONGITUD\"]').text,", "GRAPH_DATABASE as gdb graph_driver = GraphDatabase.driver( 'bolt://%s:%s' % ( gdb['host'], gdb['port'] ), auth=basic_auth(", "# Trash types trash_types = ['furniture', 'electronics', 'batteries', 'dog_shit'] container_types = [ 'clean_point',", "MERGE (t)-[:CAN_BE_DEPLOYED_IN]->(c) RETURN t,c \"\"\") # Link batteries container with their trash type", "their trash type graph_session.run(\"\"\" MATCH (t:TrashType), (c:Containers) WHERE t.name in [\"dog_shit\"] AND c.container_type", "l.select_one('atributo[nombre=\"CLASE-VIAL\"]').text, l.select_one('atributo[nombre=\"NOMBRE-VIA\"]').text, l.select_one('atributo[nombre=\"NUM\"]').text ), \"localidad\": l.select_one('atributo[nombre=\"LOCALIDAD\"]').text, \"provincia\": l.select_one('atributo[nombre=\"PROVINCIA\"]').text, \"codigo_postal\": 
l.select_one('atributo[nombre=\"CODIGO-POSTAL\"]').text, \"barrio\": l.select_one('atributo[nombre=\"BARRIO\"]').text, \"distrito\":", "'lon: %s' % e['Longitud'] graph_session.run(base_query % params) print(\"Saved %d batteries container\" % i)", "MERGE (t:TrashType{name:\"%s\"}) \"\"\" graph_session.run(query % t) # marquesinas list_batteries_container = json.load(open('Marquesinas_contenedores_pilas_2017.json')) for i," ]
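The module above only defines the extractor and the graph loader; the sketch below is a hypothetical usage example, not part of the original source. It assumes the classes are importable from a module named graph_model, that the open-data XML feed is stored as puntos_limpios.xml, and that a Neo4j instance matching config.GRAPH_DATABASE is reachable; the coordinates (roughly central Madrid) are illustrative only.

# Hypothetical usage sketch (assumed module name and file names, see note above).
from graph_model import GraphModel, PuntosLimpiosExtractor

# Parse the "puntos limpios" XML feed into a list of dictionaries.
extractor = PuntosLimpiosExtractor('puntos_limpios.xml')
extractor.read_puntos_limpios_xml()
print("Parsed %d clean points" % len(extractor.puntos_limpios))

# Load every container into Neo4j, then ask for the five closest
# battery recycling points around an example location.
model = GraphModel()
model.fill_model()
for record in model.get_distances(40.4168, -3.7038, 'battery_recycling_point'):
    print(record['point_distance'], record['latitude'], record['longitude'])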
<filename>boadata/core/data_object.py
from __future__ import annotations

import weakref
from collections import OrderedDict
from typing import TYPE_CHECKING, Final

import blinker
import numexpr as ne
import numpy as np

from boadata.core.data_conversion import ConversionUnknown, DataConversion

if TYPE_CHECKING:
    from typing import Any, ClassVar, List, Optional, Tuple, Type, Union, Callable


class UnknownDataObjectError(Exception):
    """"""


class InvalidDataObjectError(Exception):
    """"""


class UnsupportedDataOperationError(Exception):
    """"""


class _DataObjectRegistry:
    registered_types: Final[OrderedDict] = OrderedDict()
    registered_default_types = {}

    @staticmethod
    def register_type(default: bool = False) -> Callable[[type], type]:
        """Decorator that registers the data type

        :param default: Whether to serve as DataObject.from_native handler for the real type of the data object.

        Automatically discovers conversion in the form of __to_type__ and __from_type__ (see DataConversion.discover)
        """
        if isinstance(default, type):
            raise RuntimeError("Invalid use of decorator. Please, use DataObject.register_type() ")

        def wrap(boadata_type: type) -> type:
            DataObject.registered_types[boadata_type.type_name] = boadata_type
            DataConversion.discover(boadata_type)
            if default:
                DataObject.registered_default_types[boadata_type.real_type] = boadata_type
            boadata_type._registered = True
            return boadata_type

        return wrap


class _DataObjectConversions:
    """DataObject methods related to conversions."""

    @classmethod
    def accepts_uri(cls, uri: str) -> bool:
        return False

    @classmethod
    def from_uri(cls, uri: str, **kwargs) -> DataObject:
        """"Create an object of this class from an URI.

        :param uri: URI in the odo sense

        This method should be overridden in daughter classes.
        When called as DataObject.from_uri, it first tries to find
        an appropriate class by checking all registered types.
        """
        if cls == DataObject:
            last_exception = None
            for type_ in DataObject.registered_types.values():
                if type_.accepts_uri(uri):
                    try:
                        return type_.from_uri(uri, **kwargs)
                    except Exception as exc:
                        last_exception = exc
            if last_exception:
                raise last_exception
        raise UnknownDataObjectError(f"Cannot interpret '{uri}' as {cls.__name__}.")

    @classmethod
    def from_native(cls, native_object: Any, **kwargs) -> DataObject:
        """ :param native_object: ..."""
        if cls == DataObject:
            if isinstance(native_object, DataObject):
                return native_object
            boadata_type = DataObject.registered_default_types.get(type(native_object))
            if not boadata_type:
                raise UnknownDataObjectError("Cannot interpret native object of the type {0}.".format(type(native_object)))
            return boadata_type.from_native(native_object, **kwargs)
        else:
            if isinstance(native_object, DataObject):
                return native_object.convert(cls.type_name, **kwargs)
            return cls(inner_data=native_object, **kwargs)

    def is_convertible_to(self, new_type_name: Union[str, type]) -> bool:
        """ """
        if isinstance(new_type_name, type):
            new_type, new_type_name = new_type_name, new_type_name.type_name
        else:
            if not new_type_name in DataObject.registered_types:
                return False
            new_type = DataObject.registered_types[new_type_name]
        if isinstance(self, new_type):
            return True
        if not (self.type_name, new_type_name) in DataConversion.registered_conversions:
            return False
        conversion = DataConversion.registered_conversions[(self.type_name, new_type_name)]
        return conversion.applies(self)

    @classmethod
    def is_convertible_from(cls, data_object: DataObject) -> bool:
        return data_object.is_convertible_to(cls)

    @property
    def allowed_conversions(self) -> List[Tuple[str, str]]:
        return [
            key for (key, conversion) in DataConversion.registered_conversions.items()
            if key[0] == self.type_name and conversion.applies(self)
        ]

    def convert(self, new_type_name: str, **kwargs) -> DataObject:
        """Convert to another boadata-supported type.

        Auto-conversion returns the same object.

        Default implementation is based on odo.
        """
        if not new_type_name:
            available = [key[1] for key in DataConversion.registered_conversions.keys() if key[0] == self.__class__.type_name]
            raise TypeError("convert() missing 1 required positional argument: 'new_type_name', available argument values: {0}".format(", ".join(available)))
            # TODO: ... argument?
        new_type = DataObject.registered_types[new_type_name]
        if isinstance(self, new_type):
            return self
        conversion = DataConversion.registered_conversions.get((self.__class__.type_name, new_type_name))
        if not conversion:
            available = [key[1] for key in DataConversion.registered_conversions.keys() if key[0] == self.__class__.type_name]
            raise ConversionUnknown("Unknown conversion from {0} to {1}. Available: {2}".format(self.__class__.type_name, new_type_name, ", ".join(available)))
        return conversion.convert(self, new_type, **kwargs)


class _DataObjectInterface:
    """
    Possible methods:
    - add_column(key, expression, **kwargs) - based on evaluate
    """

    @property
    def shape(self) -> Tuple[int, ...]:
        """Shape of the data.

        Example: Shape of the 4x3 matrix is (4, 3)
        """
        if hasattr(self.inner_data, "shape"):
            return tuple(self.inner_data.shape)
        return ()

    @property
    def ndim(self) -> int:
        """Dimensionality of the data.

        Example: A 4x3 matrix has dimensionality 2.
        """
        if hasattr(self.inner_data, "ndim"):
            return int(self.inner_data.ndim)
        else:
            return len(self.shape)

    @property
    def size(self) -> int:
        if hasattr(self.inner_data, "size"):
            return int(self.inner_data.size)
        else:
            from operator import mul
            from functools import reduce
            reduce(mul, self.shape, 1)

    @property
    def dtype(self):
        if hasattr(self.inner_data, "dtype"):
            return self.inner_data.dtype
        else:
            return None

    @property
    def columns(self) -> Optional[List[str]]:
        """Column names (in multidimensional mappings, the value variables)

        Default variant understands pandas DataFrames
        """
        if hasattr(self.inner_data, "columns"):
            return list(self.inner_data.columns.values)
        else:
            return None

    @property
    def name(self):
        if hasattr(self.inner_data, "name"):
            return self.inner_data.name
        else:
            return None


class DataObject(_DataObjectRegistry, _DataObjectConversions, _DataObjectInterface):
    '''A basic object that contains data representable by boadata.

    :type registered_types: OrderedDict[str, type]
    :param source: From where we obtained the object (kept as weak reference)

    It is necessary to keep all arguments keyword (enforceable in Python 3).
    '''
    def __init__(self, inner_data: Any = None, uri: str = None, source: 'DataObject' = None, **kwargs):
        if self.real_type and not isinstance(inner_data, self.real_type):
            raise InvalidDataObjectError("Invalid type of inner data: `{0}` instead of expected `{1}`".format(
                inner_data.__class__.__name__, self.real_type.__name__
            ))
        self.inner_data = inner_data
        self.uri = uri
        if source:
            self.source = weakref.ref(source)

    changed = blinker.Signal("changed")  # For dynamic data objects

    real_type: ClassVar[Type] = None
    type_name: ClassVar[str] = None

    @property
    def title(self) -> str:
        return repr(self)

    def __repr__(self):
        return "{0}(\"{1}\")".format(self.__class__.__name__, self.uri)

    @staticmethod
    def proxy_methods(methods, wrap: bool = True, unwrap_args: bool = True, same_class: bool = True, through: Optional[type] = None):
        """Decorator to apply on DataObject descendants.

        :param wrap: Whether to wrap result
        :param unwrap_args: Whether to unwrap arguments
        :param same_class: Whether to try to convert to self's class
        :param through: if None, done via inner_data, otherwise through a named type

        It is not possible to proxy slots, but it is possible to inherit proxied slots :-)
        """
        import boadata

        def wrapper(boadata_type):
            if isinstance(methods, str):
                method_names = [methods]
            else:
                method_names = methods

            def make_method(method_name):
                def proxied_method(self, *args, **kwargs):
                    if unwrap_args:
                        args = [boadata.unwrap(arg) for arg in args]
                        kwargs = {key: boadata.unwrap(value) for key, value in kwargs.items()}
                    if through:
                        native_method = getattr(self.convert(through), method_name)
                    else:
                        native_method = getattr(self.inner_data, method_name)
                    result = native_method(*args, **kwargs)
                    if not wrap:
                        return result
                    elif same_class and isinstance(result, self.real_type):
                        return self.__class__.from_native(result)
                    else:
                        try:
                            return DataObject.from_native(result)
                        except:
                            return result
                return proxied_method

            for method_name in method_names:
                setattr(boadata_type, method_name, make_method(method_name))
            return boadata_type

        return wrapper

    def evaluate(self, expression: str, wrap: bool = True) -> Any:
        """Do calculation on columns of the dataset.

        :param expression: a valid expression
        :param wrap: whether to convert back to DataObject or return the native result

        Based on numexpr library
        """
        local_dict = {
            col: self[col].inner_data for col in self.columns if isinstance(col, str)
        }
        global_dict = {
            "nan": np.nan,
            "inf": np.inf
        }
        result = ne.evaluate(expression, local_dict=local_dict, global_dict=global_dict)
        if wrap:
            return DataObject.from_native(result, source=self)
        else:
            return result

    def where(self, condition: str, sql: bool = False) -> 'DataObject':
        """Choose a subset of a dataset.

        :param condition: a valid condition returning boolean
        :param sql: if True, the condition is a sql WHERE clause
        """
        if sql:
            if not "sql" in dir(self):
                raise RuntimeError("Object {0} does not support SQL.".format(self.__class__.__name__))
            query = "SELECT * FROM data WHERE {0}".format(condition)
            return self.sql(query, table_name="data")
        # TODO: Allow to be lambda
        import numpy as np
        if not self.size:
            mask = []
        else:
            mask = self.evaluate(condition, wrap=False)
            if mask.dtype != np.dtype(bool):
                raise UnsupportedDataOperationError("The result of condition has to be a boolean array")
        return DataObject.from_native(self.inner_data[mask], source=self)

    def apply_native(self, method_name: str, *args, **kwargs):
        """Apply a method defined on the native object.

        If possible, converts the result to DataObject.
        """
        # TODO: Check that it is ok (see proxy etc., consider a clever proxy attribute)
        method = getattr(self.inner_data, method_name)
        result = method(*args, **kwargs)
        try:
            result = DataObject.from_native(result)
        except:
            pass
        return result
new_type = DataObject.registered_types[new_type_name] if isinstance(self, new_type): return self conversion = DataConversion.registered_conversions.get((self.__class__.type_name, new_type_name))", "default: Whether to serve as DataObject.from_native handler for the real type of the", "def title(self) -> str: return repr(self) def __repr__(self): return \"{0}(\\\"{1}\\\")\".format(self.__class__.__name__, self.uri) @staticmethod def", "Whether to wrap result :param unwrap_args: Whether to unwrap arguments :param same_class: Whether", "a method defined on the native object. If possible, converts the result to", "conversion: available = [key[1] for key in DataConversion.registered_conversions.keys() if key[0] == self.__class__.type_name] raise", "argument: 'new_type_name', available argument values: {0}\".format(\", \".join(available))) # TODO: check argument? new_type =", "UnknownDataObjectError(\"Cannot interpret native object of the type {0}.\".format(type(native_object))) return boadata_type.from_native(native_object, **kwargs) else: if", "if not conversion: available = [key[1] for key in DataConversion.registered_conversions.keys() if key[0] ==", "new_type, **kwargs) class _DataObjectInterface: \"\"\" Possible methods: - add_column(key, expression, **kwargs) - based", "uri: str) -> bool: return False @classmethod def from_uri(cls, uri: str, **kwargs) ->", "\"shape\"): return tuple(self.inner_data.shape) return () @property def ndim(self) -> int: \"\"\"Dimensionality of the", "data: `{0}` instead of expected `{1}`\".format( inner_data.__class__.__name__, self.real_type.__name__ )) self.inner_data = inner_data self.uri", "title(self) -> str: return repr(self) def __repr__(self): return \"{0}(\\\"{1}\\\")\".format(self.__class__.__name__, self.uri) @staticmethod def proxy_methods(methods,", "import annotations import weakref from collections import OrderedDict from typing import TYPE_CHECKING, Final", "add_column(key, expression, **kwargs) - based on evaluate - \"\"\" @property def shape(self) ->", "result elif same_class and isinstance(result, self.real_type): return self.__class__.from_native(result) else: try: return DataObject.from_native(result) except:", "available = [key[1] for key in DataConversion.registered_conversions.keys() if key[0] == self.__class__.type_name] raise ConversionUnknown(\"Unknown", "elif same_class and isinstance(result, self.real_type): return self.__class__.from_native(result) else: try: return DataObject.from_native(result) except: return", "this class from an URI. 
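# Usage sketch (illustrative only, not executed here): a backend module would
# typically decorate its wrapper class with DataObject.register_type once
# DataObject is importable. The class, type_name and pandas backing below are
# hypothetical examples, not part of this module.
#
#   import pandas as pd
#   from boadata.core import DataObject
#
#   @DataObject.register_type(default=True)
#   class PandasDataFrame(DataObject):
#       type_name = "pandas_data_frame"   # key stored in registered_types
#       real_type = pd.DataFrame          # native type handled by from_native
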
class _DataObjectConversions:
    """DataObject methods related to conversions."""

    @classmethod
    def accepts_uri(cls, uri: str) -> bool:
        return False

    @classmethod
    def from_uri(cls, uri: str, **kwargs) -> DataObject:
        """Create an object of this class from a URI.

        :param uri: URI in the odo sense

        This method should be overridden in daughter classes. When called as
        DataObject.from_uri, it first tries to find an appropriate class by
        checking all registered types.
        """
        if cls == DataObject:
            last_exception = None
            for type_ in DataObject.registered_types.values():
                if type_.accepts_uri(uri):
                    try:
                        return type_.from_uri(uri, **kwargs)
                    except Exception as exc:
                        last_exception = exc
            if last_exception:
                raise last_exception
            raise UnknownDataObjectError(f"Cannot interpret '{uri}'.")
        else:
            raise UnknownDataObjectError(f"Cannot interpret '{uri}' as {cls.__name__}.")

    @classmethod
    def from_native(cls, native_object: Any, **kwargs) -> DataObject:
        """Wrap a native object in the appropriate DataObject type.

        :param native_object: the object to wrap
        :param kwargs: passed on to the constructor or conversion
        :return: a DataObject (idempotent when the input already is one)
        """
        if cls == DataObject:
            if isinstance(native_object, DataObject):
                return native_object
            boadata_type = DataObject.registered_default_types.get(type(native_object))
            if not boadata_type:
                raise UnknownDataObjectError("Cannot interpret native object of the type {0}.".format(type(native_object)))
            return boadata_type.from_native(native_object, **kwargs)
        else:
            if isinstance(native_object, DataObject):
                return native_object.convert(cls.type_name, **kwargs)
            return cls(inner_data=native_object, **kwargs)

    def is_convertible_to(self, new_type_name: Union[str, type]) -> bool:
        """Whether a registered conversion to the target type applies to this object."""
        if isinstance(new_type_name, type):
            new_type, new_type_name = new_type_name, new_type_name.type_name
        else:
            if new_type_name not in DataObject.registered_types:
                return False
            new_type = DataObject.registered_types[new_type_name]
        if isinstance(self, new_type):
            return True
        if (self.type_name, new_type_name) not in DataConversion.registered_conversions:
            return False
        conversion = DataConversion.registered_conversions[(self.type_name, new_type_name)]
        return conversion.applies(self)

    @classmethod
    def is_convertible_from(cls, data_object: DataObject) -> bool:
        return data_object.is_convertible_to(cls)

    @property
    def allowed_conversions(self) -> List[Tuple[str, str]]:
        return [
            key for (key, conversion) in DataConversion.registered_conversions.items()
            if key[0] == self.type_name and conversion.applies(self)
        ]

    def convert(self, new_type_name: str, **kwargs) -> DataObject:
        """Convert to another boadata-supported type.

        Auto-conversion returns the same object. The default implementation is
        based on odo.
        """
        if not new_type_name:
            available = [key[1] for key in DataConversion.registered_conversions.keys()
                         if key[0] == self.__class__.type_name]
            raise TypeError("convert() missing 1 required positional argument: 'new_type_name', "
                            "available argument values: {0}".format(", ".join(available)))
        # TODO: check argument?
        new_type = DataObject.registered_types[new_type_name]
        if isinstance(self, new_type):
            return self
        conversion = DataConversion.registered_conversions.get((self.__class__.type_name, new_type_name))
        if not conversion:
            available = [key[1] for key in DataConversion.registered_conversions.keys()
                         if key[0] == self.__class__.type_name]
            raise ConversionUnknown("Unknown conversion from {0} to {1}. Available: {2}".format(
                self.__class__.type_name, new_type_name, ", ".join(available)))
        return conversion.convert(self, new_type, **kwargs)

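# Usage sketch (illustrative only): once concrete types are registered,
# DataObject.from_uri dispatches on accepts_uri, and convert() looks up a
# registered DataConversion. The URI and type name below are hypothetical.
#
#   obj = DataObject.from_uri("data.csv")          # first registered type accepting the URI
#   if obj.is_convertible_to("pandas_data_frame"):
#       df_obj = obj.convert("pandas_data_frame")  # returns self if already that type
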
class _DataObjectInterface:
    """Mix-in describing the common data-access interface.

    Possible methods:
    - add_column(key, expression, **kwargs) - based on evaluate
    """

    @property
    def shape(self) -> Tuple[int, ...]:
        """Shape of the data.

        Example: Shape of the 4x3 matrix is (4, 3)
        """
        if hasattr(self.inner_data, "shape"):
            return tuple(self.inner_data.shape)
        return ()

    @property
    def ndim(self) -> int:
        """Dimensionality of the data.

        Example: A 4x3 matrix has dimensionality 2.
        """
        if hasattr(self.inner_data, "ndim"):
            return int(self.inner_data.ndim)
        else:
            return len(self.shape)

    @property
    def size(self) -> int:
        if hasattr(self.inner_data, "size"):
            return int(self.inner_data.size)
        else:
            from operator import mul
            from functools import reduce
            return reduce(mul, self.shape, 1)

    @property
    def dtype(self):
        if hasattr(self.inner_data, "dtype"):
            return self.inner_data.dtype
        else:
            return None

    @property
    def columns(self) -> Optional[List[str]]:
        """Column names (in multidimensional mappings, the value variables).

        The default variant understands pandas DataFrames.
        """
        if hasattr(self.inner_data, "columns"):
            return list(self.inner_data.columns.values)
        else:
            return None

    @property
    def name(self) -> Optional[str]:
        if hasattr(self.inner_data, "name"):
            return self.inner_data.name
        else:
            return None

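# Usage sketch (illustrative only): these properties fall back gracefully when
# the wrapped object lacks the corresponding attribute. Assuming a hypothetical
# pandas-backed object `df_obj`:
#
#   df_obj.shape    # e.g. (4, 3), taken from inner_data.shape
#   df_obj.ndim     # 2, from inner_data.ndim or len(shape)
#   df_obj.columns  # column names for DataFrame-like inner data, else None
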
class DataObject(_DataObjectRegistry, _DataObjectConversions, _DataObjectInterface):
    '''A basic object that contains data representable by boadata.

    :type registered_types: OrderedDict[str, type]
    :param source: From where we obtained the object (kept as a weak reference)

    It is necessary to keep all arguments keyword (enforceable in Python 3).
    '''

    def __init__(self, inner_data: Any = None, uri: str = None, source: 'DataObject' = None, **kwargs):
        if self.real_type and not isinstance(inner_data, self.real_type):
            raise InvalidDataObjectError("Invalid type of inner data: `{0}` instead of expected `{1}`".format(
                inner_data.__class__.__name__, self.real_type.__name__
            ))
        self.inner_data = inner_data
        self.uri = uri
        if source:
            self.source = weakref.ref(source)

    changed = blinker.Signal("changed")    # For dynamic data objects

    real_type: ClassVar[Type] = None

    type_name: ClassVar[str] = None

    @property
    def title(self) -> str:
        return repr(self)

    def __repr__(self):
        return "{0}(\"{1}\")".format(self.__class__.__name__, self.uri)

    @staticmethod
    def proxy_methods(methods, wrap: bool = True, unwrap_args: bool = True, same_class: bool = True,
                      through: Optional[type] = None):
        """Decorator to apply on DataObject descendants.

        :param wrap: Whether to wrap the result
        :param unwrap_args: Whether to unwrap arguments
        :param same_class: Whether to try to convert the result to self's class
        :param through: if None, done via inner_data, otherwise through a named type

        It is not possible to proxy slots, but it is possible to inherit proxied slots :-)
        """
        import boadata

        def wrapper(boadata_type):
            if isinstance(methods, str):
                method_names = [methods]
            else:
                method_names = methods

            def make_method(method_name):
                def proxied_method(self, *args, **kwargs):
                    if unwrap_args:
                        args = [boadata.unwrap(arg) for arg in args]
                        kwargs = {key: boadata.unwrap(value) for key, value in kwargs.items()}
                    if through:
                        native_method = getattr(self.convert(through), method_name)
                    else:
                        native_method = getattr(self.inner_data, method_name)
                    result = native_method(*args, **kwargs)
                    if not wrap:
                        return result
                    elif same_class and isinstance(result, self.real_type):
                        return self.__class__.from_native(result)
                    else:
                        try:
                            return DataObject.from_native(result)
                        except Exception:
                            return result
                return proxied_method

            for method_name in method_names:
                setattr(boadata_type, method_name, make_method(method_name))
            return boadata_type

        return wrapper

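    # Usage sketch (illustrative only): proxy_methods is meant to be used as a
    # class decorator on DataObject descendants; the method names and class
    # below are hypothetical.
    #
    #   @DataObject.proxy_methods(["head", "dropna"])
    #   @DataObject.register_type()
    #   class PandasDataFrame(DataObject):
    #       type_name = "pandas_data_frame"
    #       real_type = pd.DataFrame
    #
    #   # obj.head(10) then calls inner_data.head(10) and re-wraps the result.
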
Please, use DataObject.register_type() \") def", "new_type_name.type_name else: if not new_type_name in DataObject.registered_types: return False new_type = DataObject.registered_types[new_type_name] if", "[key[1] for key in DataConversion.registered_conversions.keys() if key[0] == self.__class__.type_name] raise ConversionUnknown(\"Unknown conversion from", "in DataConversion.registered_conversions: return False conversion = DataConversion.registered_conversions[(self.type_name, new_type_name)] return conversion.applies(self) @classmethod def is_convertible_from(cls,", "required positional argument: 'new_type_name', available argument values: {0}\".format(\", \".join(available))) # TODO: check argument?", "return native_object boadata_type = DataObject.registered_default_types.get(type(native_object)) if not boadata_type: raise UnknownDataObjectError(\"Cannot interpret native object", "= None @property def title(self) -> str: return repr(self) def __repr__(self): return \"{0}(\\\"{1}\\\")\".format(self.__class__.__name__,", "method_names = [methods] else: method_names = methods def make_method(method_name): def proxied_method(self, *args, **kwargs):", "new_type_name)) if not conversion: available = [key[1] for key in DataConversion.registered_conversions.keys() if key[0]", "DataConversion.discover(boadata_type) if default: DataObject.registered_default_types[boadata_type.real_type] = boadata_type boadata_type._registered = True return boadata_type return wrap", "same object. Default implementation is based on odo. \"\"\" if not new_type_name: available", "if cls == DataObject: last_exception = None for type_ in DataObject.registered_types.values(): if type_.accepts_uri(uri):", "self.real_type and not isinstance(inner_data, self.real_type): raise InvalidDataObjectError(\"Invalid type of inner data: `{0}` instead", "inner_data self.uri = uri if source: self.source = weakref.ref(source) changed = blinker.Signal(\"changed\") #", "DataObject.from_native(result) except: return result return proxied_method for method_name in method_names: setattr(boadata_type, method_name, make_method(method_name))", "the condition is evaluated as sql WHERE clause \"\"\" if sql: if not", "self.sql(query, table_name=\"data\") else: # TODO: Allow to be lambda import numpy as np", "contains data representable by boadata. :type registered_types: OrderedDict[str, type] :param source: From where", "method defined on the native object. 
If possible, converts the result to DataObject.", "When called as DataObject.from_uri, it first tries to find an appropriate class by", "\"inf\" : np.inf } result = ne.evaluate(expression, local_dict=local_dict, global_dict=global_dict) if wrap: return DataObject.from_native(result,", "conversion in the form of __to_type__ and __from_type__ (see DataConversion.discover) \"\"\" if isinstance(default,", "ConversionUnknown, DataConversion if TYPE_CHECKING: from typing import Any, ClassVar, List, Optional, Tuple, Type,", "boadata.unwrap(value) for key, value in kwargs.items()} if through: native_method = getattr(self.convert(through), method_name) else:", ":param same_class: Whether to try to convert to self's class :param through: if", "self's class :param through: if None, done via inner_data, otherwise through a named", "raise RuntimeError(\"Object {0} does not support SQL.\".format(self.__class__.__name__)) query = \"SELECT * FROM data", "* FROM data WHERE {0}\".format(condition) return self.sql(query, table_name=\"data\") else: # TODO: Allow to", "done via inner_data, otherwise through a named type It is not possible to", "the result to DataObject. \"\"\" # TODO: Check that it is ok (see", "hasattr(self.inner_data, \"size\"): return int(self.inner_data.size) else: from operator import mul from functools import reduce", "str = None, source: 'DataObject' = None, **kwargs): if self.real_type and not isinstance(inner_data,", "from collections import OrderedDict from typing import TYPE_CHECKING, Final import blinker import numexpr", "boadata. :type registered_types: OrderedDict[str, type] :param source: From where we obtained the object", "return boadata_type return wrap class _DataObjectConversions: \"\"\"DataObject methods related to conversions.\"\"\" @classmethod def", "DataConversion.registered_conversions.keys() if key[0] == self.__class__.type_name] raise ConversionUnknown(\"Unknown conversion from {0} to {1}. Available:", "None @property def columns(self) -> Optional[List[str]]: \"\"\"Column names (in multidimensional mappings, the value", "lambda import numpy as np if not self.size: mask = [] else: mask", "\"\"\"Decorator that registers the data type :param default: Whether to serve as DataObject.from_native", "List, Optional, Tuple, Type, Union, Callable class UnknownDataObjectError(Exception): \"\"\"\"\"\" class InvalidDataObjectError(Exception): \"\"\"\"\"\" class", "{} @staticmethod def register_type(default: bool = False) -> Callable[[type], type]: \"\"\"Decorator that registers", "key in DataConversion.registered_conversions.keys() if key[0] == self.__class__.type_name] raise ConversionUnknown(\"Unknown conversion from {0} to", "the native result Based on numexpr library \"\"\" local_dict = { col :", "real type of the data object. Automatically discovers conversion in the form of", "to another boadata-supported type. Auto-conversion returns the same object. Default implementation is based", "use of decorator. Please, use DataObject.register_type() \") def wrap(boadata_type: type) -> type: DataObject.registered_types[boadata_type.type_name]", "def wrapper(boadata_type): if isinstance(methods, str): method_names = [methods] else: method_names = methods def", "library \"\"\" local_dict = { col : self[col].inner_data for col in self.columns if", "it is ok (see proxy etc., consider a clever proxy attribute) method =", "object. If possible, converts the result to DataObject. 
\"\"\" # TODO: Check that", "return int(self.inner_data.size) else: from operator import mul from functools import reduce reduce(mul, self.shape,", "# TODO: Check that it is ok (see proxy etc., consider a clever", "\") def wrap(boadata_type: type) -> type: DataObject.registered_types[boadata_type.type_name] = boadata_type DataConversion.discover(boadata_type) if default: DataObject.registered_default_types[boadata_type.real_type]", "OrderedDict from typing import TYPE_CHECKING, Final import blinker import numexpr as ne import", "to conversions.\"\"\" @classmethod def accepts_uri(cls, uri: str) -> bool: return False @classmethod def", "\"\"\" if cls == DataObject: last_exception = None for type_ in DataObject.registered_types.values(): if", "= [key[1] for key in DataConversion.registered_conversions.keys() if key[0] == self.__class__.type_name] raise TypeError(\"convert() missing", "return self.inner_data.dtype else: return None @property def columns(self) -> Optional[List[str]]: \"\"\"Column names (in", "return \"{0}(\\\"{1}\\\")\".format(self.__class__.__name__, self.uri) @staticmethod def proxy_methods(methods, wrap: bool = True, unwrap_args: bool =", "if hasattr(self.inner_data, \"columns\"): return list(self.inner_data.columns.values) else: return None @property def name(self) -> Optional[str]:", "native result Based on numexpr library \"\"\" local_dict = { col : self[col].inner_data", "shape(self) -> Tuple[int, ...]: \"\"\"Shape of the data. Example: Shape of the 4x3", "available = [key[1] for key in DataConversion.registered_conversions.keys() if key[0] == self.__class__.type_name] raise TypeError(\"convert()", "\"\"\" if not new_type_name: available = [key[1] for key in DataConversion.registered_conversions.keys() if key[0]", "{0}\".format(\", \".join(available))) # TODO: check argument? new_type = DataObject.registered_types[new_type_name] if isinstance(self, new_type): return", "-> int: if hasattr(self.inner_data, \"size\"): return int(self.inner_data.size) else: from operator import mul from", ":-) \"\"\" import boadata def wrapper(boadata_type): if isinstance(methods, str): method_names = [methods] else:", "__future__ import annotations import weakref from collections import OrderedDict from typing import TYPE_CHECKING,", "to find an appropriate class by checking all registered types. \"\"\" if cls", "if sql: if not \"sql\" in dir(self): raise RuntimeError(\"Object {0} does not support", "method_name) result = method(*args, **kwargs) try: result = DataObject.from_native(result) except: pass return result", "should be overridden in daughter classes. When called as DataObject.from_uri, it first tries", "self.inner_data.name else: return None class DataObject(_DataObjectRegistry, _DataObjectConversions, _DataObjectInterface): '''A basic object that contains", "method_name: str, *args, **kwargs): \"\"\"Apply a method defined on the native object. 
If", "TODO: Check that it is ok (see proxy etc., consider a clever proxy", "return self conversion = DataConversion.registered_conversions.get((self.__class__.type_name, new_type_name)) if not conversion: available = [key[1] for", "native_object.convert(cls.type_name, **kwargs) return cls(inner_data=native_object, **kwargs) def is_convertible_to(self, new_type_name: Union[str, type]) -> bool: \"\"\"", "Is idempotent \"\"\" if cls == DataObject: if isinstance(native_object, DataObject): return native_object boadata_type", "raise UnknownDataObjectError(f\"Cannot interpret '{uri}'.\") else: raise UnknownDataObjectError(f\"Cannot interpret '{uri}' as {cls.__name__}.\") @classmethod def", "int(self.inner_data.size) else: from operator import mul from functools import reduce reduce(mul, self.shape, 1)", "type_.accepts_uri(uri): try: return type_.from_uri(uri, **kwargs) except Exception as exc: last_exception = exc if", "subset of a dataset. :param condition: a valid condition returning boolean :param sql:", "else: if not new_type_name in DataObject.registered_types: return False new_type = DataObject.registered_types[new_type_name] if isinstance(self,", "if None, done via inner_data, otherwise through a named type It is not", "calculation on columns of the dataset. :param expression: a valid expression :param wrap:", "isinstance(native_object, DataObject): return native_object boadata_type = DataObject.registered_default_types.get(type(native_object)) if not boadata_type: raise UnknownDataObjectError(\"Cannot interpret", "default: DataObject.registered_default_types[boadata_type.real_type] = boadata_type boadata_type._registered = True return boadata_type return wrap class _DataObjectConversions:", "native_method = getattr(self.inner_data, method_name) result = native_method(*args, **kwargs) if not wrap: return result", "wrap: return DataObject.from_native(result, source=self) else: return result def where(self, condition: str, sql: bool", "if hasattr(self.inner_data, \"dtype\"): return self.inner_data.dtype else: return None @property def columns(self) -> Optional[List[str]]:", "ndim(self) -> int: \"\"\"Dimensionality of the data. Example: A 4x3 matrix has dimensionality", "} result = ne.evaluate(expression, local_dict=local_dict, global_dict=global_dict) if wrap: return DataObject.from_native(result, source=self) else: return", "same_class and isinstance(result, self.real_type): return self.__class__.from_native(result) else: try: return DataObject.from_native(result) except: return result", "False) -> 'DataObject': \"\"\"Choose a subset of a dataset. :param condition: a valid", "= True) -> Any: \"\"\"Do calculation on columns of the dataset. :param expression:", "weakref.ref(source) changed = blinker.Signal(\"changed\") # For dynamic data objects real_type: ClassVar[Type] = None", "expression, **kwargs) - based on evaluate - \"\"\" @property def shape(self) -> Tuple[int,", "`{0}` instead of expected `{1}`\".format( inner_data.__class__.__name__, self.real_type.__name__ )) self.inner_data = inner_data self.uri =", "in dir(self): raise RuntimeError(\"Object {0} does not support SQL.\".format(self.__class__.__name__)) query = \"SELECT *", "type) -> type: DataObject.registered_types[boadata_type.type_name] = boadata_type DataConversion.discover(boadata_type) if default: DataObject.registered_default_types[boadata_type.real_type] = boadata_type boadata_type._registered", "Example: A 4x3 matrix has dimensionality 2. 
\"\"\" if hasattr(self.inner_data, \"ndim\"): return int(self.inner_data.ndim)", "key[0] == self.__class__.type_name] raise TypeError(\"convert() missing 1 required positional argument: 'new_type_name', available argument", ": self[col].inner_data for col in self.columns if isinstance(col, str) } global_dict = {", "= DataObject.registered_default_types.get(type(native_object)) if not boadata_type: raise UnknownDataObjectError(\"Cannot interpret native object of the type", "arguments :param same_class: Whether to try to convert to self's class :param through:", "in DataConversion.registered_conversions.keys() if key[0] == self.__class__.type_name] raise TypeError(\"convert() missing 1 required positional argument:", "\"name\"): return self.inner_data.name else: return None class DataObject(_DataObjectRegistry, _DataObjectConversions, _DataObjectInterface): '''A basic object", "check argument? new_type = DataObject.registered_types[new_type_name] if isinstance(self, new_type): return self conversion = DataConversion.registered_conversions.get((self.__class__.type_name,", "return repr(self) def __repr__(self): return \"{0}(\\\"{1}\\\")\".format(self.__class__.__name__, self.uri) @staticmethod def proxy_methods(methods, wrap: bool =", "DataObject or return the native result Based on numexpr library \"\"\" local_dict =", "to DataObject or return the native result Based on numexpr library \"\"\" local_dict", "wrapper def evaluate(self, expression: str, wrap: bool = True) -> Any: \"\"\"Do calculation", "valid condition returning boolean :param sql: if True, the condition is evaluated as", "-> bool: return False @classmethod def from_uri(cls, uri: str, **kwargs) -> DataObject: \"\"\"\"Create", "DataObject. \"\"\" # TODO: Check that it is ok (see proxy etc., consider", "an appropriate class by checking all registered types. \"\"\" if cls == DataObject:", "new_type_name: str, **kwargs) -> DataObject: \"\"\"Convert to another boadata-supported type. Auto-conversion returns the", "WHERE clause \"\"\" if sql: if not \"sql\" in dir(self): raise RuntimeError(\"Object {0}", "through: native_method = getattr(self.convert(through), method_name) else: native_method = getattr(self.inner_data, method_name) result = native_method(*args,", "OrderedDict[str, type] :param source: From where we obtained the object (kept as weak", "decorator. 
Please, use DataObject.register_type() \") def wrap(boadata_type: type) -> type: DataObject.registered_types[boadata_type.type_name] = boadata_type", "use DataObject.register_type() \") def wrap(boadata_type: type) -> type: DataObject.registered_types[boadata_type.type_name] = boadata_type DataConversion.discover(boadata_type) if", "if through: native_method = getattr(self.convert(through), method_name) else: native_method = getattr(self.inner_data, method_name) result =", "str: return repr(self) def __repr__(self): return \"{0}(\\\"{1}\\\")\".format(self.__class__.__name__, self.uri) @staticmethod def proxy_methods(methods, wrap: bool", "str, wrap: bool = True) -> Any: \"\"\"Do calculation on columns of the", "source: 'DataObject' = None, **kwargs): if self.real_type and not isinstance(inner_data, self.real_type): raise InvalidDataObjectError(\"Invalid", "import numpy as np from boadata.core.data_conversion import ConversionUnknown, DataConversion if TYPE_CHECKING: from typing", "-> Optional[str]: if hasattr(self.inner_data, \"name\"): return self.inner_data.name else: return None class DataObject(_DataObjectRegistry, _DataObjectConversions,", "else: try: return DataObject.from_native(result) except: return result return proxied_method for method_name in method_names:", "Please, use DataObject.register_type() \") def wrap(boadata_type: type) -> type: DataObject.registered_types[boadata_type.type_name] = boadata_type DataConversion.discover(boadata_type)", "in DataConversion.registered_conversions.keys() if key[0] == self.__class__.type_name] raise ConversionUnknown(\"Unknown conversion from {0} to {1}.", "= getattr(self.inner_data, method_name) result = native_method(*args, **kwargs) if not wrap: return result elif", "raise last_exception raise UnknownDataObjectError(f\"Cannot interpret '{uri}'.\") else: raise UnknownDataObjectError(f\"Cannot interpret '{uri}' as {cls.__name__}.\")", "in DataConversion.registered_conversions.items() if key[0] == self.type_name and conversion.applies(self)] def convert(self, new_type_name: str, **kwargs)", "the data object. 
Automatically discovers conversion in the form of __to_type__ and __from_type__", "it first tries to find an appropriate class by checking all registered types.", "registers the data type :param default: Whether to serve as DataObject.from_native handler for", "as np from boadata.core.data_conversion import ConversionUnknown, DataConversion if TYPE_CHECKING: from typing import Any,", "from typing import Any, ClassVar, List, Optional, Tuple, Type, Union, Callable class UnknownDataObjectError(Exception):", "{cls.__name__}.\") @classmethod def from_native(cls, native_object: Any, **kwargs) -> DataObject: \"\"\" :param native_object: :param", "from functools import reduce reduce(mul, self.shape, 1) @property def dtype(self): if hasattr(self.inner_data, \"dtype\"):", "**kwargs) def is_convertible_to(self, new_type_name: Union[str, type]) -> bool: \"\"\" \"\"\" if isinstance(new_type_name, type):", "valid expression :param wrap: whether to convert back to DataObject or return the", "type]) -> bool: \"\"\" \"\"\" if isinstance(new_type_name, type): new_type, new_type_name = new_type_name, new_type_name.type_name", "to proxy slots, but it is possible to inherit proxied slots :-) \"\"\"", "typing import TYPE_CHECKING, Final import blinker import numexpr as ne import numpy as", "registered_types: OrderedDict[str, type] :param source: From where we obtained the object (kept as", "return proxied_method for method_name in method_names: setattr(boadata_type, method_name, make_method(method_name)) return boadata_type return wrapper", "and conversion.applies(self)] def convert(self, new_type_name: str, **kwargs) -> DataObject: \"\"\"Convert to another boadata-supported", "new_type_name, new_type_name.type_name else: if not new_type_name in DataObject.registered_types: return False new_type = DataObject.registered_types[new_type_name]", "not conversion: available = [key[1] for key in DataConversion.registered_conversions.keys() if key[0] == self.__class__.type_name]", "expression: a valid expression :param wrap: whether to convert back to DataObject or", "for col in self.columns if isinstance(col, str) } global_dict = { \"nan\" :", "data representable by boadata. :type registered_types: OrderedDict[str, type] :param source: From where we", "of the dataset. :param expression: a valid expression :param wrap: whether to convert", "to try to convert to self's class :param through: if None, done via", "list(self.inner_data.columns.values) else: return None @property def name(self) -> Optional[str]: if hasattr(self.inner_data, \"name\"): return", "in Python 3). 
''' def __init__(self, inner_data: Any = None, uri: str =", ": np.nan, \"inf\" : np.inf } result = ne.evaluate(expression, local_dict=local_dict, global_dict=global_dict) if wrap:", "import weakref from collections import OrderedDict from typing import TYPE_CHECKING, Final import blinker", "self.real_type): raise InvalidDataObjectError(\"Invalid type of inner data: `{0}` instead of expected `{1}`\".format( inner_data.__class__.__name__,", "@classmethod def is_convertible_from(cls, data_object: DataObject) -> bool: return data_object.is_convertible_to(cls) @property def allowed_conversions(self) ->", "DataFrames \"\"\" if hasattr(self.inner_data, \"columns\"): return list(self.inner_data.columns.values) else: return None @property def name(self)", "object of the type {0}.\".format(type(native_object))) return boadata_type.from_native(native_object, **kwargs) else: if isinstance(native_object, DataObject): return", "\"\"\" @property def shape(self) -> Tuple[int, ...]: \"\"\"Shape of the data. Example: Shape", "def make_method(method_name): def proxied_method(self, *args, **kwargs): if unwrap_args: args = [boadata.unwrap(arg) for arg", "bool: return False @classmethod def from_uri(cls, uri: str, **kwargs) -> DataObject: \"\"\"\"Create an", "mask = [] else: mask = self.evaluate(condition, wrap=False) if mask.dtype != np.dtype(bool): raise", "\"\"\" \"\"\" if isinstance(new_type_name, type): new_type, new_type_name = new_type_name, new_type_name.type_name else: if not", "== DataObject: last_exception = None for type_ in DataObject.registered_types.values(): if type_.accepts_uri(uri): try: return", "[ key for (key, conversion) in DataConversion.registered_conversions.items() if key[0] == self.type_name and conversion.applies(self)]", "for arg in args] kwargs = {key: boadata.unwrap(value) for key, value in kwargs.items()}", "repr(self) def __repr__(self): return \"{0}(\\\"{1}\\\")\".format(self.__class__.__name__, self.uri) @staticmethod def proxy_methods(methods, wrap: bool = True,", "**kwargs): \"\"\"Apply a method defined on the native object. If possible, converts the", "proxy slots, but it is possible to inherit proxied slots :-) \"\"\" import", "understands pandas DataFrames \"\"\" if hasattr(self.inner_data, \"columns\"): return list(self.inner_data.columns.values) else: return None @property", "return None @property def columns(self) -> Optional[List[str]]: \"\"\"Column names (in multidimensional mappings, the", "= { \"nan\" : np.nan, \"inf\" : np.inf } result = ne.evaluate(expression, local_dict=local_dict,", "on DataObject descendants. :param wrap: Whether to wrap result :param unwrap_args: Whether to", "as exc: last_exception = exc if last_exception: raise last_exception raise UnknownDataObjectError(f\"Cannot interpret '{uri}'.\")", "method = getattr(self.inner_data, method_name) result = method(*args, **kwargs) try: result = DataObject.from_native(result) except:", "native object. If possible, converts the result to DataObject. \"\"\" # TODO: Check", "kwargs = {key: boadata.unwrap(value) for key, value in kwargs.items()} if through: native_method =", "related to conversions.\"\"\" @classmethod def accepts_uri(cls, uri: str) -> bool: return False @classmethod", "Tuple[int, ...]: \"\"\"Shape of the data. Example: Shape of the 4x3 matrix is", "= methods def make_method(method_name): def proxied_method(self, *args, **kwargs): if unwrap_args: args = [boadata.unwrap(arg)", "return () @property def ndim(self) -> int: \"\"\"Dimensionality of the data. 
Example: A", "= blinker.Signal(\"changed\") # For dynamic data objects real_type: ClassVar[Type] = None type_name: ClassVar[str]", "method_name) else: native_method = getattr(self.inner_data, method_name) result = native_method(*args, **kwargs) if not wrap:", "class _DataObjectRegistry: registered_types: Final[OrderedDict] = OrderedDict() registered_default_types = {} @staticmethod def register_type(default: bool", ": np.inf } result = ne.evaluate(expression, local_dict=local_dict, global_dict=global_dict) if wrap: return DataObject.from_native(result, source=self)", "def is_convertible_to(self, new_type_name: Union[str, type]) -> bool: \"\"\" \"\"\" if isinstance(new_type_name, type): new_type,", "of expected `{1}`\".format( inner_data.__class__.__name__, self.real_type.__name__ )) self.inner_data = inner_data self.uri = uri if", "getattr(self.inner_data, method_name) result = native_method(*args, **kwargs) if not wrap: return result elif same_class", "import mul from functools import reduce reduce(mul, self.shape, 1) @property def dtype(self): if", "Whether to unwrap arguments :param same_class: Whether to try to convert to self's", "type :param default: Whether to serve as DataObject.from_native handler for the real type", "-> bool: return data_object.is_convertible_to(cls) @property def allowed_conversions(self) -> List[Tuple[str, str]]: return [ key", ":param kwargs: :return: Is idempotent \"\"\" if cls == DataObject: if isinstance(native_object, DataObject):", "def register_type(default: bool = False) -> Callable[[type], type]: \"\"\"Decorator that registers the data", "a valid condition returning boolean :param sql: if True, the condition is evaluated", "for type_ in DataObject.registered_types.values(): if type_.accepts_uri(uri): try: return type_.from_uri(uri, **kwargs) except Exception as", "{key: boadata.unwrap(value) for key, value in kwargs.items()} if through: native_method = getattr(self.convert(through), method_name)", "boadata_type: raise UnknownDataObjectError(\"Cannot interpret native object of the type {0}.\".format(type(native_object))) return boadata_type.from_native(native_object, **kwargs)", "type. Auto-conversion returns the same object. Default implementation is based on odo. 
\"\"\"", "if cls == DataObject: if isinstance(native_object, DataObject): return native_object boadata_type = DataObject.registered_default_types.get(type(native_object)) if", "\"{0}(\\\"{1}\\\")\".format(self.__class__.__name__, self.uri) @staticmethod def proxy_methods(methods, wrap: bool = True, unwrap_args: bool = True,", "from_uri(cls, uri: str, **kwargs) -> DataObject: \"\"\"\"Create an object of this class from", "def from_native(cls, native_object: Any, **kwargs) -> DataObject: \"\"\" :param native_object: :param kwargs: :return:", "possible to inherit proxied slots :-) \"\"\" import boadata def wrapper(boadata_type): if isinstance(methods,", "where(self, condition: str, sql: bool = False) -> 'DataObject': \"\"\"Choose a subset of", "return False conversion = DataConversion.registered_conversions[(self.type_name, new_type_name)] return conversion.applies(self) @classmethod def is_convertible_from(cls, data_object: DataObject)", "DataObject.registered_default_types[boadata_type.real_type] = boadata_type boadata_type._registered = True return boadata_type return wrap class _DataObjectConversions: \"\"\"DataObject", "idempotent \"\"\" if cls == DataObject: if isinstance(native_object, DataObject): return native_object boadata_type =", "condition: str, sql: bool = False) -> 'DataObject': \"\"\"Choose a subset of a", "# For dynamic data objects real_type: ClassVar[Type] = None type_name: ClassVar[str] = None", "result Based on numexpr library \"\"\" local_dict = { col : self[col].inner_data for", "Python 3). ''' def __init__(self, inner_data: Any = None, uri: str = None,", "class UnsupportedDataOperationError(Exception): \"\"\"\"\"\" class _DataObjectRegistry: registered_types: Final[OrderedDict] = OrderedDict() registered_default_types = {} @staticmethod", "object of this class from an URI. :param uri: URI in the odo", "DataObject.from_native(result, source=self) else: return result def where(self, condition: str, sql: bool = False)", "values: {0}\".format(\", \".join(available))) # TODO: check argument? new_type = DataObject.registered_types[new_type_name] if isinstance(self, new_type):", "appropriate class by checking all registered types. \"\"\" if cls == DataObject: last_exception", "Whether to serve as DataObject.from_native handler for the real type of the data", "2. \"\"\" if hasattr(self.inner_data, \"ndim\"): return int(self.inner_data.ndim) else: return len(self.shape) @property def size(self)", "= new_type_name, new_type_name.type_name else: if not new_type_name in DataObject.registered_types: return False new_type =", "return False new_type = DataObject.registered_types[new_type_name] if isinstance(self, new_type): return True if not (self.type_name,", "to {1}. 
Available: {2}\".format(self.__class__.type_name, new_type_name, \", \".join(available))) return conversion.convert(self, new_type, **kwargs) class _DataObjectInterface:", "return DataObject.from_native(result) except: return result return proxied_method for method_name in method_names: setattr(boadata_type, method_name,", "not support SQL.\".format(self.__class__.__name__)) query = \"SELECT * FROM data WHERE {0}\".format(condition) return self.sql(query,", "functools import reduce reduce(mul, self.shape, 1) @property def dtype(self): if hasattr(self.inner_data, \"dtype\"): return", "= exc if last_exception: raise last_exception raise UnknownDataObjectError(f\"Cannot interpret '{uri}'.\") else: raise UnknownDataObjectError(f\"Cannot", "interpret '{uri}'.\") else: raise UnknownDataObjectError(f\"Cannot interpret '{uri}' as {cls.__name__}.\") @classmethod def from_native(cls, native_object:", "bool = True, unwrap_args: bool = True, same_class: bool = True, through: Optional[type]", "the dataset. :param expression: a valid expression :param wrap: whether to convert back", "...]: \"\"\"Shape of the data. Example: Shape of the 4x3 matrix is (4,", "== self.__class__.type_name] raise TypeError(\"convert() missing 1 required positional argument: 'new_type_name', available argument values:", "@property def name(self) -> Optional[str]: if hasattr(self.inner_data, \"name\"): return self.inner_data.name else: return None", "if not \"sql\" in dir(self): raise RuntimeError(\"Object {0} does not support SQL.\".format(self.__class__.__name__)) query", "= { col : self[col].inner_data for col in self.columns if isinstance(col, str) }", "Union[str, type]) -> bool: \"\"\" \"\"\" if isinstance(new_type_name, type): new_type, new_type_name = new_type_name,", ":param default: Whether to serve as DataObject.from_native handler for the real type of", "else: raise UnknownDataObjectError(f\"Cannot interpret '{uri}' as {cls.__name__}.\") @classmethod def from_native(cls, native_object: Any, **kwargs)", "Callable class UnknownDataObjectError(Exception): \"\"\"\"\"\" class InvalidDataObjectError(Exception): \"\"\"\"\"\" class UnsupportedDataOperationError(Exception): \"\"\"\"\"\" class _DataObjectRegistry: registered_types:", "def __repr__(self): return \"{0}(\\\"{1}\\\")\".format(self.__class__.__name__, self.uri) @staticmethod def proxy_methods(methods, wrap: bool = True, unwrap_args:", "hasattr(self.inner_data, \"shape\"): return tuple(self.inner_data.shape) return () @property def ndim(self) -> int: \"\"\"Dimensionality of", "= inner_data self.uri = uri if source: self.source = weakref.ref(source) changed = blinker.Signal(\"changed\")", "= getattr(self.convert(through), method_name) else: native_method = getattr(self.inner_data, method_name) result = native_method(*args, **kwargs) if", "expression :param wrap: whether to convert back to DataObject or return the native", "by checking all registered types. \"\"\" if cls == DataObject: last_exception = None", "= boadata_type boadata_type._registered = True return boadata_type return wrap class _DataObjectConversions: \"\"\"DataObject methods", "3) \"\"\" if hasattr(self.inner_data, \"shape\"): return tuple(self.inner_data.shape) return () @property def ndim(self) ->", "in self.columns if isinstance(col, str) } global_dict = { \"nan\" : np.nan, \"inf\"", "condition is evaluated as sql WHERE clause \"\"\" if sql: if not \"sql\"", "a subset of a dataset. 
:param condition: a valid condition returning boolean :param", "bool = True, same_class: bool = True, through: Optional[type] = None): \"\"\"Decorator to", "is_convertible_from(cls, data_object: DataObject) -> bool: return data_object.is_convertible_to(cls) @property def allowed_conversions(self) -> List[Tuple[str, str]]:", "condition: a valid condition returning boolean :param sql: if True, the condition is", "def proxy_methods(methods, wrap: bool = True, unwrap_args: bool = True, same_class: bool =", "else: return None @property def name(self) -> Optional[str]: if hasattr(self.inner_data, \"name\"): return self.inner_data.name", "method_names = methods def make_method(method_name): def proxied_method(self, *args, **kwargs): if unwrap_args: args =", "self.size: mask = [] else: mask = self.evaluate(condition, wrap=False) if mask.dtype != np.dtype(bool):", "= False) -> 'DataObject': \"\"\"Choose a subset of a dataset. :param condition: a", "`{1}`\".format( inner_data.__class__.__name__, self.real_type.__name__ )) self.inner_data = inner_data self.uri = uri if source: self.source", "if isinstance(native_object, DataObject): return native_object.convert(cls.type_name, **kwargs) return cls(inner_data=native_object, **kwargs) def is_convertible_to(self, new_type_name: Union[str,", "kwargs.items()} if through: native_method = getattr(self.convert(through), method_name) else: native_method = getattr(self.inner_data, method_name) result", "raise RuntimeError(\"Invalid use of decorator. Please, use DataObject.register_type() \") def wrap(boadata_type: type) ->", "{1}. Available: {2}\".format(self.__class__.type_name, new_type_name, \", \".join(available))) return conversion.convert(self, new_type, **kwargs) class _DataObjectInterface: \"\"\"", "argument values: {0}\".format(\", \".join(available))) # TODO: check argument? new_type = DataObject.registered_types[new_type_name] if isinstance(self,", "is (4, 3) \"\"\" if hasattr(self.inner_data, \"shape\"): return tuple(self.inner_data.shape) return () @property def", "_DataObjectInterface: \"\"\" Possible methods: - add_column(key, expression, **kwargs) - based on evaluate -", "Tuple, Type, Union, Callable class UnknownDataObjectError(Exception): \"\"\"\"\"\" class InvalidDataObjectError(Exception): \"\"\"\"\"\" class UnsupportedDataOperationError(Exception): \"\"\"\"\"\"", "as np if not self.size: mask = [] else: mask = self.evaluate(condition, wrap=False)", "in method_names: setattr(boadata_type, method_name, make_method(method_name)) return boadata_type return wrapper def evaluate(self, expression: str,", "columns of the dataset. :param expression: a valid expression :param wrap: whether to", "type of inner data: `{0}` instead of expected `{1}`\".format( inner_data.__class__.__name__, self.real_type.__name__ )) self.inner_data", "another boadata-supported type. Auto-conversion returns the same object. Default implementation is based on", "\"SELECT * FROM data WHERE {0}\".format(condition) return self.sql(query, table_name=\"data\") else: # TODO: Allow", "of this class from an URI. 
:param uri: URI in the odo sense", "class UnknownDataObjectError(Exception): \"\"\"\"\"\" class InvalidDataObjectError(Exception): \"\"\"\"\"\" class UnsupportedDataOperationError(Exception): \"\"\"\"\"\" class _DataObjectRegistry: registered_types: Final[OrderedDict]", "class InvalidDataObjectError(Exception): \"\"\"\"\"\" class UnsupportedDataOperationError(Exception): \"\"\"\"\"\" class _DataObjectRegistry: registered_types: Final[OrderedDict] = OrderedDict() registered_default_types", "return None @property def name(self) -> Optional[str]: if hasattr(self.inner_data, \"name\"): return self.inner_data.name else:", "Shape of the 4x3 matrix is (4, 3) \"\"\" if hasattr(self.inner_data, \"shape\"): return", "If possible, converts the result to DataObject. \"\"\" # TODO: Check that it", "except Exception as exc: last_exception = exc if last_exception: raise last_exception raise UnknownDataObjectError(f\"Cannot", "TYPE_CHECKING: from typing import Any, ClassVar, List, Optional, Tuple, Type, Union, Callable class", "new_type_name in DataObject.registered_types: return False new_type = DataObject.registered_types[new_type_name] if isinstance(self, new_type): return True", "unwrap_args: bool = True, same_class: bool = True, through: Optional[type] = None): \"\"\"Decorator", "arg in args] kwargs = {key: boadata.unwrap(value) for key, value in kwargs.items()} if", "boadata-supported type. Auto-conversion returns the same object. Default implementation is based on odo.", "else: native_method = getattr(self.inner_data, method_name) result = native_method(*args, **kwargs) if not wrap: return", "tries to find an appropriate class by checking all registered types. \"\"\" if", "from operator import mul from functools import reduce reduce(mul, self.shape, 1) @property def", "to self's class :param through: if None, done via inner_data, otherwise through a", "is ok (see proxy etc., consider a clever proxy attribute) method = getattr(self.inner_data,", "boadata def wrapper(boadata_type): if isinstance(methods, str): method_names = [methods] else: method_names = methods", "isinstance(new_type_name, type): new_type, new_type_name = new_type_name, new_type_name.type_name else: if not new_type_name in DataObject.registered_types:", "return self.sql(query, table_name=\"data\") else: # TODO: Allow to be lambda import numpy as", "import OrderedDict from typing import TYPE_CHECKING, Final import blinker import numexpr as ne", "registered_default_types = {} @staticmethod def register_type(default: bool = False) -> Callable[[type], type]: \"\"\"Decorator", "for the real type of the data object. Automatically discovers conversion in the", "changed = blinker.Signal(\"changed\") # For dynamic data objects real_type: ClassVar[Type] = None type_name:", "in DataObject.registered_types: return False new_type = DataObject.registered_types[new_type_name] if isinstance(self, new_type): return True if", "the real type of the data object. Automatically discovers conversion in the form", "data. Example: Shape of the 4x3 matrix is (4, 3) \"\"\" if hasattr(self.inner_data,", "mappings, the value variables) Default variant understands pandas DataFrames \"\"\" if hasattr(self.inner_data, \"columns\"):", "len(self.shape) @property def size(self) -> int: if hasattr(self.inner_data, \"size\"): return int(self.inner_data.size) else: from", "necessary to keep all arguments keyword (enforceable in Python 3). 
''' def __init__(self,", "to inherit proxied slots :-) \"\"\" import boadata def wrapper(boadata_type): if isinstance(methods, str):", "isinstance(methods, str): method_names = [methods] else: method_names = methods def make_method(method_name): def proxied_method(self,", "not (self.type_name, new_type_name) in DataConversion.registered_conversions: return False conversion = DataConversion.registered_conversions[(self.type_name, new_type_name)] return conversion.applies(self)", "DataObject.registered_types: return False new_type = DataObject.registered_types[new_type_name] if isinstance(self, new_type): return True if not", "def evaluate(self, expression: str, wrap: bool = True) -> Any: \"\"\"Do calculation on", "and not isinstance(inner_data, self.real_type): raise InvalidDataObjectError(\"Invalid type of inner data: `{0}` instead of", "if key[0] == self.__class__.type_name] raise TypeError(\"convert() missing 1 required positional argument: 'new_type_name', available", "DataObject descendants. :param wrap: Whether to wrap result :param unwrap_args: Whether to unwrap", "methods def make_method(method_name): def proxied_method(self, *args, **kwargs): if unwrap_args: args = [boadata.unwrap(arg) for", "{0}\".format(condition) return self.sql(query, table_name=\"data\") else: # TODO: Allow to be lambda import numpy", "in the odo sense This method should be overridden in daughter classes. When", "None, uri: str = None, source: 'DataObject' = None, **kwargs): if self.real_type and", "Final[OrderedDict] = OrderedDict() registered_default_types = {} @staticmethod def register_type(default: bool = False) ->", "evaluate - \"\"\" @property def shape(self) -> Tuple[int, ...]: \"\"\"Shape of the data.", "None, done via inner_data, otherwise through a named type It is not possible", ":param sql: if True, the condition is evaluated as sql WHERE clause \"\"\"", "\"\"\" :param native_object: :param kwargs: :return: Is idempotent \"\"\" if cls == DataObject:", "np.dtype(bool): raise UnsupportedDataOperationError(\"The result of condition has to be a boolean array\") return", "consider a clever proxy attribute) method = getattr(self.inner_data, method_name) result = method(*args, **kwargs)", "of the data. Example: A 4x3 matrix has dimensionality 2. 
\"\"\" if hasattr(self.inner_data,", "np if not self.size: mask = [] else: mask = self.evaluate(condition, wrap=False) if", "InvalidDataObjectError(Exception): \"\"\"\"\"\" class UnsupportedDataOperationError(Exception): \"\"\"\"\"\" class _DataObjectRegistry: registered_types: Final[OrderedDict] = OrderedDict() registered_default_types =", "{0}.\".format(type(native_object))) return boadata_type.from_native(native_object, **kwargs) else: if isinstance(native_object, DataObject): return native_object.convert(cls.type_name, **kwargs) return cls(inner_data=native_object,", "unwrap_args: args = [boadata.unwrap(arg) for arg in args] kwargs = {key: boadata.unwrap(value) for", "whether to convert back to DataObject or return the native result Based on", "DataObject.from_native(self.inner_data[mask], source=self) def apply_native(self, method_name: str, *args, **kwargs): \"\"\"Apply a method defined on", "raise UnsupportedDataOperationError(\"The result of condition has to be a boolean array\") return DataObject.from_native(self.inner_data[mask],", "self.type_name and conversion.applies(self)] def convert(self, new_type_name: str, **kwargs) -> DataObject: \"\"\"Convert to another", "RuntimeError(\"Object {0} does not support SQL.\".format(self.__class__.__name__)) query = \"SELECT * FROM data WHERE", "wrap: return result elif same_class and isinstance(result, self.real_type): return self.__class__.from_native(result) else: try: return", "@property def allowed_conversions(self) -> List[Tuple[str, str]]: return [ key for (key, conversion) in", "result = native_method(*args, **kwargs) if not wrap: return result elif same_class and isinstance(result,", "{0} to {1}. Available: {2}\".format(self.__class__.type_name, new_type_name, \", \".join(available))) return conversion.convert(self, new_type, **kwargs) class", "\"nan\" : np.nan, \"inf\" : np.inf } result = ne.evaluate(expression, local_dict=local_dict, global_dict=global_dict) if", "\"\"\" local_dict = { col : self[col].inner_data for col in self.columns if isinstance(col,", "possible to proxy slots, but it is possible to inherit proxied slots :-)", "if True, the condition is evaluated as sql WHERE clause \"\"\" if sql:", "obtained the object (kept as weak reference) It is necessary to keep all", "cls(inner_data=native_object, **kwargs) def is_convertible_to(self, new_type_name: Union[str, type]) -> bool: \"\"\" \"\"\" if isinstance(new_type_name,", "True, the condition is evaluated as sql WHERE clause \"\"\" if sql: if", "int: \"\"\"Dimensionality of the data. Example: A 4x3 matrix has dimensionality 2. \"\"\"", "wrap class _DataObjectConversions: \"\"\"DataObject methods related to conversions.\"\"\" @classmethod def accepts_uri(cls, uri: str)", "on odo. \"\"\" if not new_type_name: available = [key[1] for key in DataConversion.registered_conversions.keys()", "to keep all arguments keyword (enforceable in Python 3). 
''' def __init__(self, inner_data:", "to unwrap arguments :param same_class: Whether to try to convert to self's class", "key for (key, conversion) in DataConversion.registered_conversions.items() if key[0] == self.type_name and conversion.applies(self)] def", "= True, same_class: bool = True, through: Optional[type] = None): \"\"\"Decorator to apply", "and isinstance(result, self.real_type): return self.__class__.from_native(result) else: try: return DataObject.from_native(result) except: return result return", "\"sql\" in dir(self): raise RuntimeError(\"Object {0} does not support SQL.\".format(self.__class__.__name__)) query = \"SELECT", "DataConversion.registered_conversions[(self.type_name, new_type_name)] return conversion.applies(self) @classmethod def is_convertible_from(cls, data_object: DataObject) -> bool: return data_object.is_convertible_to(cls)", "result to DataObject. \"\"\" # TODO: Check that it is ok (see proxy", "possible, converts the result to DataObject. \"\"\" # TODO: Check that it is", "reduce(mul, self.shape, 1) @property def dtype(self): if hasattr(self.inner_data, \"dtype\"): return self.inner_data.dtype else: return", "DataConversion.registered_conversions.keys() if key[0] == self.__class__.type_name] raise TypeError(\"convert() missing 1 required positional argument: 'new_type_name',", "value variables) Default variant understands pandas DataFrames \"\"\" if hasattr(self.inner_data, \"columns\"): return list(self.inner_data.columns.values)", "boadata_type return wrapper def evaluate(self, expression: str, wrap: bool = True) -> Any:", "a valid expression :param wrap: whether to convert back to DataObject or return", "**kwargs) except Exception as exc: last_exception = exc if last_exception: raise last_exception raise", "return wrapper def evaluate(self, expression: str, wrap: bool = True) -> Any: \"\"\"Do", "ClassVar[str] = None @property def title(self) -> str: return repr(self) def __repr__(self): return", "def where(self, condition: str, sql: bool = False) -> 'DataObject': \"\"\"Choose a subset", "= DataObject.registered_types[new_type_name] if isinstance(self, new_type): return True if not (self.type_name, new_type_name) in DataConversion.registered_conversions:", "'new_type_name', available argument values: {0}\".format(\", \".join(available))) # TODO: check argument? new_type = DataObject.registered_types[new_type_name]", "variant understands pandas DataFrames \"\"\" if hasattr(self.inner_data, \"columns\"): return list(self.inner_data.columns.values) else: return None", "expression: str, wrap: bool = True) -> Any: \"\"\"Do calculation on columns of", "[] else: mask = self.evaluate(condition, wrap=False) if mask.dtype != np.dtype(bool): raise UnsupportedDataOperationError(\"The result", "if not wrap: return result elif same_class and isinstance(result, self.real_type): return self.__class__.from_native(result) else:", "an URI. :param uri: URI in the odo sense This method should be", "class by checking all registered types. \"\"\" if cls == DataObject: last_exception =", "be overridden in daughter classes. When called as DataObject.from_uri, it first tries to", "native_object: Any, **kwargs) -> DataObject: \"\"\" :param native_object: :param kwargs: :return: Is idempotent", "try to convert to self's class :param through: if None, done via inner_data,", "DataObject(_DataObjectRegistry, _DataObjectConversions, _DataObjectInterface): '''A basic object that contains data representable by boadata. 
:type", "dynamic data objects real_type: ClassVar[Type] = None type_name: ClassVar[str] = None @property def", ":param wrap: Whether to wrap result :param unwrap_args: Whether to unwrap arguments :param", "mask.dtype != np.dtype(bool): raise UnsupportedDataOperationError(\"The result of condition has to be a boolean", "multidimensional mappings, the value variables) Default variant understands pandas DataFrames \"\"\" if hasattr(self.inner_data,", "isinstance(native_object, DataObject): return native_object.convert(cls.type_name, **kwargs) return cls(inner_data=native_object, **kwargs) def is_convertible_to(self, new_type_name: Union[str, type])", "etc., consider a clever proxy attribute) method = getattr(self.inner_data, method_name) result = method(*args,", "bool = False) -> 'DataObject': \"\"\"Choose a subset of a dataset. :param condition:", "query = \"SELECT * FROM data WHERE {0}\".format(condition) return self.sql(query, table_name=\"data\") else: #", "source: From where we obtained the object (kept as weak reference) It is", "hasattr(self.inner_data, \"dtype\"): return self.inner_data.dtype else: return None @property def columns(self) -> Optional[List[str]]: \"\"\"Column", "methods: - add_column(key, expression, **kwargs) - based on evaluate - \"\"\" @property def", "None class DataObject(_DataObjectRegistry, _DataObjectConversions, _DataObjectInterface): '''A basic object that contains data representable by", "TYPE_CHECKING, Final import blinker import numexpr as ne import numpy as np from", "== self.type_name and conversion.applies(self)] def convert(self, new_type_name: str, **kwargs) -> DataObject: \"\"\"Convert to", "TODO: Allow to be lambda import numpy as np if not self.size: mask", "Automatically discovers conversion in the form of __to_type__ and __from_type__ (see DataConversion.discover) \"\"\"", "__to_type__ and __from_type__ (see DataConversion.discover) \"\"\" if isinstance(default, type): raise RuntimeError(\"Invalid use of", "DataConversion.registered_conversions: return False conversion = DataConversion.registered_conversions[(self.type_name, new_type_name)] return conversion.applies(self) @classmethod def is_convertible_from(cls, data_object:", "Final import blinker import numexpr as ne import numpy as np from boadata.core.data_conversion", "method should be overridden in daughter classes. When called as DataObject.from_uri, it first", "# TODO: check argument? new_type = DataObject.registered_types[new_type_name] if isinstance(self, new_type): return self conversion", "1) @property def dtype(self): if hasattr(self.inner_data, \"dtype\"): return self.inner_data.dtype else: return None @property", "native_method = getattr(self.convert(through), method_name) else: native_method = getattr(self.inner_data, method_name) result = native_method(*args, **kwargs)", "kwargs: :return: Is idempotent \"\"\" if cls == DataObject: if isinstance(native_object, DataObject): return", "This method should be overridden in daughter classes. When called as DataObject.from_uri, it", "Any = None, uri: str = None, source: 'DataObject' = None, **kwargs): if", ":param unwrap_args: Whether to unwrap arguments :param same_class: Whether to try to convert", "TODO: check argument? 
new_type = DataObject.registered_types[new_type_name] if isinstance(self, new_type): return self conversion =", "inner_data: Any = None, uri: str = None, source: 'DataObject' = None, **kwargs):", "as DataObject.from_uri, it first tries to find an appropriate class by checking all", "type]: \"\"\"Decorator that registers the data type :param default: Whether to serve as", "return data_object.is_convertible_to(cls) @property def allowed_conversions(self) -> List[Tuple[str, str]]: return [ key for (key,", "conversion.applies(self)] def convert(self, new_type_name: str, **kwargs) -> DataObject: \"\"\"Convert to another boadata-supported type.", "as weak reference) It is necessary to keep all arguments keyword (enforceable in", "__init__(self, inner_data: Any = None, uri: str = None, source: 'DataObject' = None,", "4x3 matrix has dimensionality 2. \"\"\" if hasattr(self.inner_data, \"ndim\"): return int(self.inner_data.ndim) else: return", "if self.real_type and not isinstance(inner_data, self.real_type): raise InvalidDataObjectError(\"Invalid type of inner data: `{0}`", "instead of expected `{1}`\".format( inner_data.__class__.__name__, self.real_type.__name__ )) self.inner_data = inner_data self.uri = uri", "proxied_method(self, *args, **kwargs): if unwrap_args: args = [boadata.unwrap(arg) for arg in args] kwargs", ":param expression: a valid expression :param wrap: whether to convert back to DataObject", "data objects real_type: ClassVar[Type] = None type_name: ClassVar[str] = None @property def title(self)", "False new_type = DataObject.registered_types[new_type_name] if isinstance(self, new_type): return True if not (self.type_name, new_type_name)", "raise InvalidDataObjectError(\"Invalid type of inner data: `{0}` instead of expected `{1}`\".format( inner_data.__class__.__name__, self.real_type.__name__", "exc if last_exception: raise last_exception raise UnknownDataObjectError(f\"Cannot interpret '{uri}'.\") else: raise UnknownDataObjectError(f\"Cannot interpret", "result of condition has to be a boolean array\") return DataObject.from_native(self.inner_data[mask], source=self) def", "the form of __to_type__ and __from_type__ (see DataConversion.discover) \"\"\" if isinstance(default, type): raise", "class _DataObjectConversions: \"\"\"DataObject methods related to conversions.\"\"\" @classmethod def accepts_uri(cls, uri: str) ->", "def size(self) -> int: if hasattr(self.inner_data, \"size\"): return int(self.inner_data.size) else: from operator import", "= DataObject.registered_types[new_type_name] if isinstance(self, new_type): return self conversion = DataConversion.registered_conversions.get((self.__class__.type_name, new_type_name)) if not", "on columns of the dataset. 
:param expression: a valid expression :param wrap: whether", "not \"sql\" in dir(self): raise RuntimeError(\"Object {0} does not support SQL.\".format(self.__class__.__name__)) query =", "return native_object.convert(cls.type_name, **kwargs) return cls(inner_data=native_object, **kwargs) def is_convertible_to(self, new_type_name: Union[str, type]) -> bool:", "@staticmethod def register_type(default: bool = False) -> Callable[[type], type]: \"\"\"Decorator that registers the", "Any, **kwargs) -> DataObject: \"\"\" :param native_object: :param kwargs: :return: Is idempotent \"\"\"", "native_object boadata_type = DataObject.registered_default_types.get(type(native_object)) if not boadata_type: raise UnknownDataObjectError(\"Cannot interpret native object of", "- based on evaluate - \"\"\" @property def shape(self) -> Tuple[int, ...]: \"\"\"Shape", "def dtype(self): if hasattr(self.inner_data, \"dtype\"): return self.inner_data.dtype else: return None @property def columns(self)", "= None, **kwargs): if self.real_type and not isinstance(inner_data, self.real_type): raise InvalidDataObjectError(\"Invalid type of", "evaluated as sql WHERE clause \"\"\" if sql: if not \"sql\" in dir(self):", "\"\"\" # TODO: Check that it is ok (see proxy etc., consider a", "import ConversionUnknown, DataConversion if TYPE_CHECKING: from typing import Any, ClassVar, List, Optional, Tuple,", "True return boadata_type return wrap class _DataObjectConversions: \"\"\"DataObject methods related to conversions.\"\"\" @classmethod", "self[col].inner_data for col in self.columns if isinstance(col, str) } global_dict = { \"nan\"", "uri: str, **kwargs) -> DataObject: \"\"\"\"Create an object of this class from an", "all arguments keyword (enforceable in Python 3). ''' def __init__(self, inner_data: Any =", "convert back to DataObject or return the native result Based on numexpr library", "DataConversion.registered_conversions.get((self.__class__.type_name, new_type_name)) if not conversion: available = [key[1] for key in DataConversion.registered_conversions.keys() if", "} global_dict = { \"nan\" : np.nan, \"inf\" : np.inf } result =", "hasattr(self.inner_data, \"columns\"): return list(self.inner_data.columns.values) else: return None @property def name(self) -> Optional[str]: if", "proxied_method for method_name in method_names: setattr(boadata_type, method_name, make_method(method_name)) return boadata_type return wrapper def", "result :param unwrap_args: Whether to unwrap arguments :param same_class: Whether to try to", "new_type): return self conversion = DataConversion.registered_conversions.get((self.__class__.type_name, new_type_name)) if not conversion: available = [key[1]", "bool = True, through: Optional[type] = None): \"\"\"Decorator to apply on DataObject descendants." ]
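Read as a whole, the module above is organized around the register_type decorator plus from_native/from_uri dispatch. The following is a minimal usage sketch, not part of the module: the NumpyArrayObject class, its "numpy_array" type name, and the import path are assumptions made for the example, and it presumes no other registered type already claims numpy.ndarray as its default real_type.

import numpy as np

from boadata.core.data_object import DataObject   # import path assumed from the module above


@DataObject.register_type(default=True)            # hypothetical type, made the default
class NumpyArrayObject(DataObject):                 # handler for np.ndarray
    real_type = np.ndarray                          # inner_data must be an ndarray
    type_name = "numpy_array"                       # key used in registered_types / convert()


obj = DataObject.from_native(np.arange(12).reshape(3, 4))
print(type(obj).__name__)    # NumpyArrayObject, picked via registered_default_types
print(obj.shape, obj.ndim)   # (3, 4) 2 -- provided by _DataObjectInterface
print(obj.title)             # NumpyArrayObject("None") -- repr falls back to the unset uri

Conversions between such registered types would then go through convert(), backed by whatever DataConversion pairs have been registered or discovered via __to_type__/__from_type__ methods.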
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

from veles.data.bindata import BinData
from veles.data.repack import Endian, Repacker


class TestRepacker(unittest.TestCase):
    def test_endian(self):
        self.assertNotEqual(Endian.LITTLE, Endian.BIG)

    def test_simple_copy(self):
        r = Repacker(endian=Endian.LITTLE, from_width=8, to_width=8)
        self.assertEqual(r.repack_unit, 8)
        self.assertEqual(r.repack_size(num_elements=2), 2)
        self.assertEqual(r.repackable_size(from_size=2), 2)
        a = BinData(8, [1, 2, 3, 4, 5, 6])
        b = r.repack(a, start=1, num_elements=2)
        self.assertEqual(b, BinData(8, [2, 3]))
        self.assertEqual(r.repack(a), a)

    def test_gather_8to16_little(self):
        r = Repacker(endian=Endian.LITTLE, from_width=8, to_width=16)
        self.assertEqual(r.repack_unit, 16)
        self.assertEqual(r.repack_size(2), 4)
        self.assertEqual(r.repackable_size(2), 1)
        self.assertEqual(r.repackable_size(3), 1)
        self.assertEqual(r.repackable_size(4), 2)
        a = BinData(8, [1, 2, 3, 4, 5, 6])
        b = r.repack(a, start=1, num_elements=2)
        self.assertEqual(b, BinData.from_spaced_hex(16, '0302 0504'))
        c = r.repack(a, start=1)
        self.assertEqual(b, c)
        d = r.repack(a)
        self.assertEqual(d, BinData.from_spaced_hex(16, '0201 0403 0605'))

    def test_gather_8to16_big(self):
        r = Repacker(endian=Endian.BIG, from_width=8, to_width=16)
        self.assertEqual(r.repack_unit, 16)
        self.assertEqual(r.repack_size(2), 4)
        self.assertEqual(r.repackable_size(2), 1)
        self.assertEqual(r.repackable_size(3), 1)
        self.assertEqual(r.repackable_size(4), 2)
        a = BinData(8, [1, 2, 3, 4, 5, 6])
        b = r.repack(a, start=1, num_elements=2)
        self.assertEqual(b, BinData.from_spaced_hex(16, '0203 0405'))
        c = r.repack(a, start=1)
        self.assertEqual(b, c)
        d = r.repack(a)
        self.assertEqual(d, BinData.from_spaced_hex(16, '0102 0304 0506'))

    def test_mash_8to12_little(self):
        r = Repacker(Endian.LITTLE, 8, 12)
        self.assertEqual(r.repack_unit, 24)
        self.assertEqual(r.repack_size(1), 2)
        self.assertEqual(r.repack_size(2), 3)
        self.assertEqual(r.repackable_size(1), 0)
        self.assertEqual(r.repackable_size(2), 1)
        self.assertEqual(r.repackable_size(3), 2)
        self.assertEqual(r.repackable_size(4), 2)
        a = BinData.from_spaced_hex(8, '12 34 56 78 9a')
        b = r.repack(a, 1, 2)
        self.assertEqual(b, BinData.from_spaced_hex(12, '634 785'))
        c = r.repack(a, 1)
        self.assertEqual(b, c)
        d = r.repack(a)
        self.assertEqual(d, BinData.from_spaced_hex(12, '412 563 a78'))

    def test_mash_8to12_big(self):
        r = Repacker(Endian.BIG, 8, 12)
        self.assertEqual(r.repack_unit, 24)
        self.assertEqual(r.repack_size(1), 2)
        self.assertEqual(r.repack_size(2), 3)
        self.assertEqual(r.repackable_size(1), 0)
        self.assertEqual(r.repackable_size(2), 1)
        self.assertEqual(r.repackable_size(3), 2)
        self.assertEqual(r.repackable_size(4), 2)
        a = BinData.from_spaced_hex(8, '12 34 56 78 9a')
        b = r.repack(a, 1, 2)
        self.assertEqual(b, BinData.from_spaced_hex(12, '345 678'))
        c = r.repack(a, 1)
        self.assertEqual(b, c)
        d = r.repack(a)
        self.assertEqual(d, BinData.from_spaced_hex(12, '123 456 789'))

    def test_split_16to8_little(self):
        r = Repacker(Endian.LITTLE, 16, 8)
        self.assertEqual(r.repack_unit, 16)
        self.assertEqual(r.repack_size(3), 2)
        self.assertEqual(r.repackable_size(3), 6)
        a = BinData.from_spaced_hex(16, '1234 5678 9abc')
        b = r.repack(a, 1, 3)
        self.assertEqual(b, BinData.from_spaced_hex(8, '78 56 bc'))

    def test_split_16to8_big(self):
        r = Repacker(Endian.BIG, 16, 8)
        self.assertEqual(r.repack_unit, 16)
        self.assertEqual(r.repack_size(3), 2)
        self.assertEqual(r.repackable_size(3), 6)
        a = BinData.from_spaced_hex(16, '1234 5678 9abc')
        b = r.repack(a, 1, 3)
        self.assertEqual(b, BinData.from_spaced_hex(8, '56 78 9a'))
# See the License for the", "3, 4, 5, 6]) b = r.repack(a, start=1, num_elements=2) self.assertEqual(b, BinData.from_spaced_hex(16, '0203 0405'))", "77 88 99 aa') b = r.repack(a, 1, 2) self.assertEqual(b, BinData.from_spaced_hex(23, '1119a2 333bc4'))", "0x9abc]) b = r.repack(a, 1, 3) self.assertEqual(b, BinData.from_spaced_hex(8, '56 78 9a')) def test_padded_8to23_left_little(self):", "BinData.from_spaced_hex(12, '345 678')) c = r.repack(a, 1) self.assertEqual(b, c) d = r.repack(a) self.assertEqual(d,", "a9876 fedcb')) def test_split_60to20_big(self): r = Repacker(Endian.BIG, 60, 20) self.assertEqual(r.repack_unit, 60) self.assertEqual(r.repack_size(1), 1)", "2) self.assertEqual(r.repackable_size(1), 3) a = BinData(60, [0xfedcba987654321]) b = r.repack(a) self.assertEqual(b, BinData.from_spaced_hex(20, 'fedcb", "1, 2) self.assertEqual(b, BinData.from_spaced_hex(23, '443322 087766')) def test_padded_8to23_right_little(self): r = Repacker(Endian.LITTLE, 8, 23,", "[2, 3])) self.assertEqual(r.repack(a), a) def test_gather_8to16_little(self): r = Repacker(endian=Endian.LITTLE, from_width=8, to_width=16) self.assertEqual(r.repack_unit, 16)", "num_elements=2) self.assertEqual(b, BinData(8, [2, 3])) self.assertEqual(r.repack(a), a) def test_gather_8to16_little(self): r = Repacker(endian=Endian.LITTLE, from_width=8,", "self.assertEqual(b, BinData.from_spaced_hex(20, '54321 a9876 fedcb')) def test_split_60to20_big(self): r = Repacker(Endian.BIG, 60, 20) self.assertEqual(r.repack_unit,", "88 99 aa') b = r.repack(a, 1, 2) self.assertEqual(b, BinData.from_spaced_hex(23, '1119a2 333bc4')) def", "under the License. import unittest from veles.data.bindata import BinData from veles.data.repack import Endian,", "# Copyright 2017 CodiLime # # Licensed under the Apache License, Version 2.0", "77 88 99 aa') b = r.repack(a, 1, 2) self.assertEqual(b, BinData.from_spaced_hex(23, '443322 087766'))", "r.repack(a, 1, 2) self.assertEqual(b, BinData.from_spaced_hex(23, '334455 778899')) def test_padded_8to23_right_big(self): r = Repacker(Endian.BIG, 8,", "Repacker(Endian.LITTLE, 8, 12) self.assertEqual(r.repack_unit, 24) self.assertEqual(r.repack_size(1), 2) self.assertEqual(r.repack_size(2), 3) self.assertEqual(r.repackable_size(1), 0) self.assertEqual(r.repackable_size(2), 1)", "22 33 44 55 66 77 88 99 aa') b = r.repack(a, 1,", "= Repacker(Endian.LITTLE, 8, 23, low_pad=9) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8) self.assertEqual(r.repackable_size(7), 1) self.assertEqual(r.repackable_size(8), 2)", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "unittest from veles.data.bindata import BinData from veles.data.repack import Endian, Repacker class TestRepacker(unittest.TestCase): def", "789')) def test_split_8to1_little(self): r = Repacker(Endian.LITTLE, 8, 1) self.assertEqual(r.repack_unit, 8) self.assertEqual(r.repack_size(12), 2) self.assertEqual(r.repack_size(8),", "r = Repacker(Endian.LITTLE, 60, 20) self.assertEqual(r.repack_unit, 60) self.assertEqual(r.repack_size(1), 1) self.assertEqual(r.repack_size(2), 1) self.assertEqual(r.repack_size(3), 1)", "BinData.from_spaced_hex(12, '123 456 789')) def test_split_8to1_little(self): r = Repacker(Endian.LITTLE, 8, 1) self.assertEqual(r.repack_unit, 8)", "to_width=8) self.assertEqual(r.repack_unit, 8) self.assertEqual(r.repack_size(num_elements=2), 2) self.assertEqual(r.repackable_size(from_size=2), 2) a = BinData(8, [1, 2, 3,", "self.assertEqual(r.repack_size(4), 2) self.assertEqual(r.repackable_size(1), 3) 
a = BinData(60, [0xfedcba987654321]) b = r.repack(a) self.assertEqual(b, BinData.from_spaced_hex(20,", "self.assertEqual(b, c) def test_split_60to20_little(self): r = Repacker(Endian.LITTLE, 60, 20) self.assertEqual(r.repack_unit, 60) self.assertEqual(r.repack_size(1), 1)", "start=1, num_elements=2) self.assertEqual(b, BinData.from_spaced_hex(16, '0302 0504')) c = r.repack(a, start=1) self.assertEqual(b, c) d", "c = BinData.from_spaced_hex(1, ' '.join(format(0x345, '012b'))) self.assertEqual(b, c) def test_split_60to20_little(self): r = Repacker(Endian.LITTLE,", "self.assertEqual(r.repackable_size(from_size=2), 2) a = BinData(8, [1, 2, 3, 4]) b = r.repack(a, start=1,", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "def test_padded_8to23_right_big(self): r = Repacker(Endian.BIG, 8, 23, low_pad=9) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8) self.assertEqual(r.repackable_size(7),", "Endian, Repacker class TestRepacker(unittest.TestCase): def test_endian(self): self.assertNotEqual(Endian.LITTLE, Endian.BIG) def test_simple_copy(self): r = Repacker(endian=Endian.LITTLE,", "self.assertEqual(r.repackable_size(3), 1) self.assertEqual(r.repackable_size(4), 2) a = BinData(8, [1, 2, 3, 4, 5, 6])", "2) a = BinData(8, [1, 2, 3, 4, 5, 6]) b = r.repack(a,", "= Repacker(Endian.BIG, 8, 1) self.assertEqual(r.repack_unit, 8) self.assertEqual(r.repack_size(12), 2) self.assertEqual(r.repack_size(8), 1) self.assertEqual(r.repack_size(9), 2) self.assertEqual(r.repack_size(17),", "= Repacker(Endian.BIG, 8, 23, low_pad=8, high_pad=1) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8) self.assertEqual(r.repackable_size(7), 1) self.assertEqual(r.repackable_size(8),", "3) a = BinData(60, [0xfedcba987654321]) b = r.repack(a) self.assertEqual(b, BinData.from_spaced_hex(20, 'fedcb a9876 54321'))", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "BinData.from_spaced_hex(1, ' '.join(format(0x345, '012b'))) self.assertEqual(b, c) def test_split_60to20_little(self): r = Repacker(Endian.LITTLE, 60, 20)", "self.assertEqual(b, BinData.from_spaced_hex(20, 'fedcb a9876 54321')) def test_split_16to8_little(self): r = Repacker(Endian.LITTLE, 16, 8) self.assertEqual(r.repack_unit,", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "1, 3) self.assertEqual(b, BinData.from_spaced_hex(8, '78 56 bc')) def test_split_16to8_big(self): r = Repacker(Endian.BIG, 16,", "r = Repacker(Endian.LITTLE, 8, 23, high_pad=9) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8) self.assertEqual(r.repackable_size(7), 1) self.assertEqual(r.repackable_size(8),", "def test_padded_8to23_left_little(self): r = Repacker(Endian.LITTLE, 8, 23, high_pad=9) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8) self.assertEqual(r.repackable_size(7),", "c) def test_split_60to20_little(self): r = Repacker(Endian.LITTLE, 60, 20) self.assertEqual(r.repack_unit, 60) self.assertEqual(r.repack_size(1), 1) self.assertEqual(r.repack_size(2),", "<reponame>pombredanne/veles<gh_stars>100-1000 # Copyright 2017 CodiLime # # Licensed under the Apache License, Version", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "77 88 99 aa') b = r.repack(a, 1, 2) self.assertEqual(b, BinData.from_spaced_hex(23, '223344 667788'))", "1) self.assertEqual(b, c) d = r.repack(a) self.assertEqual(d, BinData.from_spaced_hex(12, '412 
563 a78')) def test_mash_8to12_big(self):", "self.assertEqual(r.repackable_size(4), 2) a = BinData(8, [1, 2, 3, 4, 5, 6]) b =", "= BinData(8, [1, 2, 3, 4]) b = r.repack(a, start=1, num_elements=2) self.assertEqual(b, BinData(8,", "License. import unittest from veles.data.bindata import BinData from veles.data.repack import Endian, Repacker class", "required by applicable law or agreed to in writing, software # distributed under", "16) self.assertEqual(r.repack_size(3), 2) self.assertEqual(r.repackable_size(3), 6) a = BinData(16, [0x1234, 0x5678, 0x9abc]) b =", "b = r.repack(a, 1, 2) self.assertEqual(b, BinData.from_spaced_hex(12, '345 678')) c = r.repack(a, 1)", "a = BinData.from_spaced_hex(8, '11 22 33 44 55 66 77 88 99 aa')", "r.repack(a) self.assertEqual(d, BinData.from_spaced_hex(12, '412 563 a78')) def test_mash_8to12_big(self): r = Repacker(Endian.BIG, 8, 12)", "applicable law or agreed to in writing, software # distributed under the License", "5, 6]) b = r.repack(a, start=1, num_elements=2) self.assertEqual(b, BinData.from_spaced_hex(16, '0203 0405')) c =", "test_simple_copy(self): r = Repacker(endian=Endian.LITTLE, from_width=8, to_width=8) self.assertEqual(r.repack_unit, 8) self.assertEqual(r.repack_size(num_elements=2), 2) self.assertEqual(r.repackable_size(from_size=2), 2) a", "3])) self.assertEqual(r.repack(a), a) def test_gather_8to16_little(self): r = Repacker(endian=Endian.LITTLE, from_width=8, to_width=16) self.assertEqual(r.repack_unit, 16) self.assertEqual(r.repack_size(2),", "= BinData(16, [0x1234, 0x5678, 0x9abc]) b = r.repack(a, 1, 3) self.assertEqual(b, BinData.from_spaced_hex(8, '56", "Repacker class TestRepacker(unittest.TestCase): def test_endian(self): self.assertNotEqual(Endian.LITTLE, Endian.BIG) def test_simple_copy(self): r = Repacker(endian=Endian.LITTLE, from_width=8,", "'12 34 56') b = r.repack(a, 1, 12) c = BinData.from_spaced_hex(1, ' '.join(format(0x345,", "def test_mash_8to12_little(self): r = Repacker(Endian.LITTLE, 8, 12) self.assertEqual(r.repack_unit, 24) self.assertEqual(r.repack_size(1), 2) self.assertEqual(r.repack_size(2), 3)", "language governing permissions and # limitations under the License. import unittest from veles.data.bindata", "or agreed to in writing, software # distributed under the License is distributed", "33 44 55 66 77 88 99 aa') b = r.repack(a, 1, 2)", "def test_simple_copy(self): r = Repacker(endian=Endian.LITTLE, from_width=8, to_width=8) self.assertEqual(r.repack_unit, 8) self.assertEqual(r.repack_size(num_elements=2), 2) self.assertEqual(r.repackable_size(from_size=2), 2)", "r.repack(a, 1, 12) c = BinData.from_spaced_hex(1, ' '.join(format(0x345, '012b'))) self.assertEqual(b, c) def test_split_60to20_little(self):", "BinData(8, [2, 3])) self.assertEqual(r.repack(a), a) def test_gather_8to16_little(self): r = Repacker(endian=Endian.LITTLE, from_width=8, to_width=16) self.assertEqual(r.repack_unit,", "BinData.from_spaced_hex(16, '0102 0304 0506')) def test_mash_8to12_little(self): r = Repacker(Endian.LITTLE, 8, 12) self.assertEqual(r.repack_unit, 24)", "class TestRepacker(unittest.TestCase): def test_endian(self): self.assertNotEqual(Endian.LITTLE, Endian.BIG) def test_simple_copy(self): r = Repacker(endian=Endian.LITTLE, from_width=8, to_width=8)", "CONDITIONS OF ANY KIND, either express or implied. 
# See the License for", "BinData.from_spaced_hex(23, '554433 198877')) def test_padded_8to23_left_big(self): r = Repacker(Endian.BIG, 8, 23, high_pad=9) self.assertEqual(r.repack_unit, 32)", "= Repacker(Endian.BIG, 8, 23, low_pad=9) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8) self.assertEqual(r.repackable_size(7), 1) self.assertEqual(r.repackable_size(8), 2)", "34 56 78 9a') b = r.repack(a, 1, 2) self.assertEqual(b, BinData.from_spaced_hex(12, '634 785'))", "a = BinData.from_spaced_hex(8, '12 34 56') b = r.repack(a, 1, 12) c =", "'0201 0403 0605')) def test_gather_8to16_big(self): r = Repacker(endian=Endian.BIG, from_width=8, to_width=16) self.assertEqual(r.repack_unit, 16) self.assertEqual(r.repack_size(2),", "4) self.assertEqual(r.repackable_size(2), 1) self.assertEqual(r.repackable_size(3), 1) self.assertEqual(r.repackable_size(4), 2) a = BinData(8, [1, 2, 3,", "self.assertEqual(r.repack_size(3), 1) self.assertEqual(r.repack_size(4), 2) self.assertEqual(r.repackable_size(1), 3) a = BinData(60, [0xfedcba987654321]) b = r.repack(a)", "8, 23, low_pad=8, high_pad=1) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8) self.assertEqual(r.repackable_size(7), 1) self.assertEqual(r.repackable_size(8), 2) a", "b = r.repack(a, 1, 2) self.assertEqual(b, BinData.from_spaced_hex(23, '334455 778899')) def test_padded_8to23_right_big(self): r =", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "writing, software # distributed under the License is distributed on an \"AS IS\"", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "test_split_60to20_little(self): r = Repacker(Endian.LITTLE, 60, 20) self.assertEqual(r.repack_unit, 60) self.assertEqual(r.repack_size(1), 1) self.assertEqual(r.repack_size(2), 1) self.assertEqual(r.repack_size(3),", "r.repack(a, 1, 3) self.assertEqual(b, BinData.from_spaced_hex(8, '78 56 bc')) def test_split_16to8_big(self): r = Repacker(Endian.BIG,", "License. # You may obtain a copy of the License at # #", "'0203 0405')) c = r.repack(a, start=1) self.assertEqual(b, c) d = r.repack(a) self.assertEqual(d, BinData.from_spaced_hex(16,", "r = Repacker(Endian.BIG, 8, 12) self.assertEqual(r.repack_unit, 24) self.assertEqual(r.repack_size(1), 2) self.assertEqual(r.repack_size(2), 3) self.assertEqual(r.repackable_size(1), 0)", "num_elements=2) self.assertEqual(b, BinData.from_spaced_hex(16, '0302 0504')) c = r.repack(a, start=1) self.assertEqual(b, c) d =", "r.repack(a) self.assertEqual(d, BinData.from_spaced_hex(12, '123 456 789')) def test_split_8to1_little(self): r = Repacker(Endian.LITTLE, 8, 1)", "'345 678')) c = r.repack(a, 1) self.assertEqual(b, c) d = r.repack(a) self.assertEqual(d, BinData.from_spaced_hex(12,", "2) self.assertEqual(b, BinData.from_spaced_hex(23, '443322 087766')) def test_padded_8to23_right_little(self): r = Repacker(Endian.LITTLE, 8, 23, low_pad=9)", "import unittest from veles.data.bindata import BinData from veles.data.repack import Endian, Repacker class TestRepacker(unittest.TestCase):", "3, 4, 5, 6]) b = r.repack(a, start=1, num_elements=2) self.assertEqual(b, BinData.from_spaced_hex(16, '0302 0504'))", "23, high_pad=9) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8) self.assertEqual(r.repackable_size(7), 1) self.assertEqual(r.repackable_size(8), 2) a = BinData.from_spaced_hex(8,", "compliance with the License. 
# You may obtain a copy of the License", "60) self.assertEqual(r.repack_size(1), 1) self.assertEqual(r.repack_size(2), 1) self.assertEqual(r.repack_size(3), 1) self.assertEqual(r.repack_size(4), 2) self.assertEqual(r.repackable_size(1), 3) a =", "= r.repack(a, 1, 2) self.assertEqual(b, BinData.from_spaced_hex(23, '2aa219 4cc43b')) def test_padded_8to23_mixed_little(self): r = Repacker(Endian.LITTLE,", "def test_split_16to8_big(self): r = Repacker(Endian.BIG, 16, 8) self.assertEqual(r.repack_unit, 16) self.assertEqual(r.repack_size(3), 2) self.assertEqual(r.repackable_size(3), 6)", "test_padded_8to23_right_little(self): r = Repacker(Endian.LITTLE, 8, 23, low_pad=9) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8) self.assertEqual(r.repackable_size(7), 1)", "0x5678, 0x9abc]) b = r.repack(a, 1, 3) self.assertEqual(b, BinData.from_spaced_hex(8, '78 56 bc')) def", "b = r.repack(a, 1, 12) c = BinData.from_spaced_hex(1, ' '.join(format(0x634, '012b')[::-1])) self.assertEqual(b, c)", "= r.repack(a, 1, 3) self.assertEqual(b, BinData.from_spaced_hex(8, '78 56 bc')) def test_split_16to8_big(self): r =", "1) self.assertEqual(b, c) d = r.repack(a) self.assertEqual(d, BinData.from_spaced_hex(12, '123 456 789')) def test_split_8to1_little(self):", "for the specific language governing permissions and # limitations under the License. import", "self.assertEqual(r.repackable_size(1), 3) a = BinData(60, [0xfedcba987654321]) b = r.repack(a) self.assertEqual(b, BinData.from_spaced_hex(20, 'fedcb a9876", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "8, 1) self.assertEqual(r.repack_unit, 8) self.assertEqual(r.repack_size(12), 2) self.assertEqual(r.repack_size(8), 1) self.assertEqual(r.repack_size(9), 2) self.assertEqual(r.repack_size(17), 3) self.assertEqual(r.repackable_size(1),", "6) a = BinData(16, [0x1234, 0x5678, 0x9abc]) b = r.repack(a, 1, 3) self.assertEqual(b,", "= Repacker(Endian.BIG, 8, 12) self.assertEqual(r.repack_unit, 24) self.assertEqual(r.repack_size(1), 2) self.assertEqual(r.repack_size(2), 3) self.assertEqual(r.repackable_size(1), 0) self.assertEqual(r.repackable_size(2),", "8, 23, low_pad=9) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8) self.assertEqual(r.repackable_size(7), 1) self.assertEqual(r.repackable_size(8), 2) a =", "'012b'))) self.assertEqual(b, c) def test_split_60to20_little(self): r = Repacker(Endian.LITTLE, 60, 20) self.assertEqual(r.repack_unit, 60) self.assertEqual(r.repack_size(1),", "= Repacker(endian=Endian.BIG, from_width=8, to_width=16) self.assertEqual(r.repack_unit, 16) self.assertEqual(r.repack_size(2), 4) self.assertEqual(r.repackable_size(2), 1) self.assertEqual(r.repackable_size(3), 1) self.assertEqual(r.repackable_size(4),", "1) self.assertEqual(r.repackable_size(3), 1) self.assertEqual(r.repackable_size(4), 2) a = BinData(8, [1, 2, 3, 4, 5,", "high_pad=9) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8) self.assertEqual(r.repackable_size(7), 1) self.assertEqual(r.repackable_size(8), 2) a = BinData.from_spaced_hex(8, '11", "self.assertEqual(b, BinData.from_spaced_hex(8, '78 56 bc')) def test_split_16to8_big(self): r = Repacker(Endian.BIG, 16, 8) self.assertEqual(r.repack_unit,", "Repacker(Endian.BIG, 8, 23, high_pad=9) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8) self.assertEqual(r.repackable_size(7), 1) self.assertEqual(r.repackable_size(8), 2) a", "= BinData(16, [0x1234, 0x5678, 0x9abc]) b = r.repack(a, 
1, 3) self.assertEqual(b, BinData.from_spaced_hex(8, '78", "test_padded_8to23_mixed_big(self): r = Repacker(Endian.BIG, 8, 23, low_pad=8, high_pad=1) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8) self.assertEqual(r.repackable_size(7),", "not use this file except in compliance with the License. # You may", "self.assertEqual(b, BinData.from_spaced_hex(23, '1119a2 333bc4')) def test_padded_8to23_mixed_big(self): r = Repacker(Endian.BIG, 8, 23, low_pad=8, high_pad=1)", "def test_split_60to20_big(self): r = Repacker(Endian.BIG, 60, 20) self.assertEqual(r.repack_unit, 60) self.assertEqual(r.repack_size(1), 1) self.assertEqual(r.repack_size(2), 1)", "aa') b = r.repack(a, 1, 2) self.assertEqual(b, BinData.from_spaced_hex(23, '334455 778899')) def test_padded_8to23_right_big(self): r", "r = Repacker(Endian.LITTLE, 8, 12) self.assertEqual(r.repack_unit, 24) self.assertEqual(r.repack_size(1), 2) self.assertEqual(r.repack_size(2), 3) self.assertEqual(r.repackable_size(1), 0)", "c = r.repack(a, start=1) self.assertEqual(b, c) d = r.repack(a) self.assertEqual(d, BinData.from_spaced_hex(16, '0201 0403", "License, Version 2.0 (the \"License\"); # you may not use this file except", "[0xfedcba987654321]) b = r.repack(a) self.assertEqual(b, BinData.from_spaced_hex(20, 'fedcb a9876 54321')) def test_split_16to8_little(self): r =", "b = r.repack(a, start=1, num_elements=2) self.assertEqual(b, BinData.from_spaced_hex(16, '0302 0504')) c = r.repack(a, start=1)", "r = Repacker(Endian.BIG, 8, 23, low_pad=9) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8) self.assertEqual(r.repackable_size(7), 1) self.assertEqual(r.repackable_size(8),", "'412 563 a78')) def test_mash_8to12_big(self): r = Repacker(Endian.BIG, 8, 12) self.assertEqual(r.repack_unit, 24) self.assertEqual(r.repack_size(1),", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "78 9a')) def test_padded_8to23_left_little(self): r = Repacker(Endian.LITTLE, 8, 23, high_pad=9) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2),", "def test_padded_8to23_mixed_big(self): r = Repacker(Endian.BIG, 8, 23, low_pad=8, high_pad=1) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8)", "r = Repacker(Endian.BIG, 8, 23, low_pad=8, high_pad=1) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8) self.assertEqual(r.repackable_size(7), 1)", "'54321 a9876 fedcb')) def test_split_60to20_big(self): r = Repacker(Endian.BIG, 60, 20) self.assertEqual(r.repack_unit, 60) self.assertEqual(r.repack_size(1),", "a = BinData.from_spaced_hex(8, '12 34 56 78 9a') b = r.repack(a, 1, 2)", "r.repack(a, start=1, num_elements=2) self.assertEqual(b, BinData.from_spaced_hex(16, '0302 0504')) c = r.repack(a, start=1) self.assertEqual(b, c)", "# you may not use this file except in compliance with the License.", "def test_split_8to1_big(self): r = Repacker(Endian.BIG, 8, 1) self.assertEqual(r.repack_unit, 8) self.assertEqual(r.repack_size(12), 2) self.assertEqual(r.repack_size(8), 1)", "99 aa') b = r.repack(a, 1, 2) self.assertEqual(b, BinData.from_spaced_hex(23, '2aa219 4cc43b')) def test_padded_8to23_mixed_little(self):", "agreed to in writing, software # distributed under the License is distributed on", "r = Repacker(Endian.BIG, 8, 1) self.assertEqual(r.repack_unit, 8) self.assertEqual(r.repack_size(12), 2) self.assertEqual(r.repack_size(8), 1) self.assertEqual(r.repack_size(9), 2)", "0605')) def test_gather_8to16_big(self): r = 
Repacker(endian=Endian.BIG, from_width=8, to_width=16) self.assertEqual(r.repack_unit, 16) self.assertEqual(r.repack_size(2), 4) self.assertEqual(r.repackable_size(2),", "start=1, num_elements=2) self.assertEqual(b, BinData(8, [2, 3])) self.assertEqual(r.repack(a), a) def test_gather_8to16_little(self): r = Repacker(endian=Endian.LITTLE,", "a = BinData(60, [0xfedcba987654321]) b = r.repack(a) self.assertEqual(b, BinData.from_spaced_hex(20, '54321 a9876 fedcb')) def", "BinData(16, [0x1234, 0x5678, 0x9abc]) b = r.repack(a, 1, 3) self.assertEqual(b, BinData.from_spaced_hex(8, '56 78", "(the \"License\"); # you may not use this file except in compliance with", "self.assertEqual(d, BinData.from_spaced_hex(16, '0201 0403 0605')) def test_gather_8to16_big(self): r = Repacker(endian=Endian.BIG, from_width=8, to_width=16) self.assertEqual(r.repack_unit,", "3) a = BinData(60, [0xfedcba987654321]) b = r.repack(a) self.assertEqual(b, BinData.from_spaced_hex(20, '54321 a9876 fedcb'))", "3) self.assertEqual(b, BinData.from_spaced_hex(8, '78 56 bc')) def test_split_16to8_big(self): r = Repacker(Endian.BIG, 16, 8)", "1, 12) c = BinData.from_spaced_hex(1, ' '.join(format(0x345, '012b'))) self.assertEqual(b, c) def test_split_60to20_little(self): r", "aa') b = r.repack(a, 1, 2) self.assertEqual(b, BinData.from_spaced_hex(23, '1119a2 333bc4')) def test_padded_8to23_mixed_big(self): r", "self.assertEqual(r.repack_size(2), 1) self.assertEqual(r.repack_size(3), 1) self.assertEqual(r.repack_size(4), 2) self.assertEqual(r.repackable_size(1), 3) a = BinData(60, [0xfedcba987654321]) b", "66 77 88 99 aa') b = r.repack(a, 1, 2) self.assertEqual(b, BinData.from_spaced_hex(23, '2aa219", "# Unless required by applicable law or agreed to in writing, software #", "c) d = r.repack(a) self.assertEqual(d, BinData.from_spaced_hex(12, '412 563 a78')) def test_mash_8to12_big(self): r =", "Repacker(endian=Endian.LITTLE, from_width=8, to_width=8) self.assertEqual(r.repack_unit, 8) self.assertEqual(r.repack_size(num_elements=2), 2) self.assertEqual(r.repackable_size(from_size=2), 2) a = BinData(8, [1,", "by applicable law or agreed to in writing, software # distributed under the", "3) self.assertEqual(r.repackable_size(1), 8) a = BinData.from_spaced_hex(8, '12 34 56') b = r.repack(a, 1,", "'634 785')) c = r.repack(a, 1) self.assertEqual(b, c) d = r.repack(a) self.assertEqual(d, BinData.from_spaced_hex(12,", "2) self.assertEqual(r.repack_size(8), 1) self.assertEqual(r.repack_size(9), 2) self.assertEqual(r.repack_size(17), 3) self.assertEqual(r.repackable_size(1), 8) a = BinData.from_spaced_hex(8, '12", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "8) self.assertEqual(r.repack_size(12), 2) self.assertEqual(r.repack_size(8), 1) self.assertEqual(r.repack_size(9), 2) self.assertEqual(r.repack_size(17), 3) self.assertEqual(r.repackable_size(1), 8) a =", "r.repack(a, start=1, num_elements=2) self.assertEqual(b, BinData.from_spaced_hex(16, '0203 0405')) c = r.repack(a, start=1) self.assertEqual(b, c)", "r = Repacker(endian=Endian.LITTLE, from_width=8, to_width=8) self.assertEqual(r.repack_unit, 8) self.assertEqual(r.repack_size(num_elements=2), 2) self.assertEqual(r.repackable_size(from_size=2), 2) a =", "34 56') b = r.repack(a, 1, 12) c = BinData.from_spaced_hex(1, ' '.join(format(0x345, '012b')))", "self.assertEqual(r.repack_size(1), 1) self.assertEqual(r.repack_size(2), 1) self.assertEqual(r.repack_size(3), 1) self.assertEqual(r.repack_size(4), 2) self.assertEqual(r.repackable_size(1), 
3) a = BinData(60,", "0403 0605')) def test_gather_8to16_big(self): r = Repacker(endian=Endian.BIG, from_width=8, to_width=16) self.assertEqual(r.repack_unit, 16) self.assertEqual(r.repack_size(2), 4)", "'554433 198877')) def test_padded_8to23_left_big(self): r = Repacker(Endian.BIG, 8, 23, high_pad=9) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2),", "governing permissions and # limitations under the License. import unittest from veles.data.bindata import", "2) self.assertEqual(r.repackable_size(3), 6) a = BinData(16, [0x1234, 0x5678, 0x9abc]) b = r.repack(a, 1,", "test_split_60to20_big(self): r = Repacker(Endian.BIG, 60, 20) self.assertEqual(r.repack_unit, 60) self.assertEqual(r.repack_size(1), 1) self.assertEqual(r.repack_size(2), 1) self.assertEqual(r.repack_size(3),", "'78 56 bc')) def test_split_16to8_big(self): r = Repacker(Endian.BIG, 16, 8) self.assertEqual(r.repack_unit, 16) self.assertEqual(r.repack_size(3),", "b = r.repack(a, 1, 12) c = BinData.from_spaced_hex(1, ' '.join(format(0x345, '012b'))) self.assertEqual(b, c)", "'11 22 33 44 55 66 77 88 99 aa') b = r.repack(a,", "r = Repacker(endian=Endian.LITTLE, from_width=8, to_width=16) self.assertEqual(r.repack_unit, 16) self.assertEqual(r.repack_size(2), 4) self.assertEqual(r.repackable_size(2), 1) self.assertEqual(r.repackable_size(3), 1)", "file except in compliance with the License. # You may obtain a copy", "= Repacker(Endian.LITTLE, 16, 8) self.assertEqual(r.repack_unit, 16) self.assertEqual(r.repack_size(3), 2) self.assertEqual(r.repackable_size(3), 6) a = BinData(16,", "Repacker(Endian.BIG, 8, 12) self.assertEqual(r.repack_unit, 24) self.assertEqual(r.repack_size(1), 2) self.assertEqual(r.repack_size(2), 3) self.assertEqual(r.repackable_size(1), 0) self.assertEqual(r.repackable_size(2), 1)", "self.assertEqual(b, BinData.from_spaced_hex(12, '634 785')) c = r.repack(a, 1) self.assertEqual(b, c) d = r.repack(a)", "a = BinData(60, [0xfedcba987654321]) b = r.repack(a) self.assertEqual(b, BinData.from_spaced_hex(20, 'fedcb a9876 54321')) def", "License for the specific language governing permissions and # limitations under the License.", "= Repacker(endian=Endian.LITTLE, from_width=8, to_width=8) self.assertEqual(r.repack_unit, 8) self.assertEqual(r.repack_size(num_elements=2), 2) self.assertEqual(r.repackable_size(from_size=2), 2) a = BinData(8,", "test_padded_8to23_mixed_little(self): r = Repacker(Endian.LITTLE, 8, 23, low_pad=8, high_pad=1) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8) self.assertEqual(r.repackable_size(7),", "self.assertEqual(r.repack_unit, 60) self.assertEqual(r.repack_size(1), 1) self.assertEqual(r.repack_size(2), 1) self.assertEqual(r.repack_size(3), 1) self.assertEqual(r.repack_size(4), 2) self.assertEqual(r.repackable_size(1), 3) a", "self.assertEqual(r.repackable_size(1), 3) a = BinData(60, [0xfedcba987654321]) b = r.repack(a) self.assertEqual(b, BinData.from_spaced_hex(20, '54321 a9876", "'1119a2 333bc4')) def test_padded_8to23_mixed_big(self): r = Repacker(Endian.BIG, 8, 23, low_pad=8, high_pad=1) self.assertEqual(r.repack_unit, 32)", "1, 3) self.assertEqual(b, BinData.from_spaced_hex(8, '56 78 9a')) def test_padded_8to23_left_little(self): r = Repacker(Endian.LITTLE, 8,", "1) self.assertEqual(r.repack_unit, 8) self.assertEqual(r.repack_size(12), 2) self.assertEqual(r.repack_size(8), 1) self.assertEqual(r.repack_size(9), 2) self.assertEqual(r.repack_size(17), 3) self.assertEqual(r.repackable_size(1), 8)", "to in writing, software # distributed under 
the License is distributed on an", "implied. # See the License for the specific language governing permissions and #", "56') b = r.repack(a, 1, 12) c = BinData.from_spaced_hex(1, ' '.join(format(0x634, '012b')[::-1])) self.assertEqual(b,", "2) self.assertEqual(b, BinData.from_spaced_hex(23, '554433 198877')) def test_padded_8to23_left_big(self): r = Repacker(Endian.BIG, 8, 23, high_pad=9)", "r.repack(a, 1) self.assertEqual(b, c) d = r.repack(a) self.assertEqual(d, BinData.from_spaced_hex(12, '123 456 789')) def", "\"License\"); # you may not use this file except in compliance with the", "self.assertEqual(r.repack_size(8), 1) self.assertEqual(r.repack_size(9), 2) self.assertEqual(r.repack_size(17), 3) self.assertEqual(r.repackable_size(1), 8) a = BinData.from_spaced_hex(8, '12 34", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "self.assertEqual(d, BinData.from_spaced_hex(16, '0102 0304 0506')) def test_mash_8to12_little(self): r = Repacker(Endian.LITTLE, 8, 12) self.assertEqual(r.repack_unit,", "self.assertEqual(r.repack_size(2), 3) self.assertEqual(r.repackable_size(1), 0) self.assertEqual(r.repackable_size(2), 1) self.assertEqual(r.repackable_size(3), 2) self.assertEqual(r.repackable_size(4), 2) a = BinData.from_spaced_hex(8,", "8, 12) self.assertEqual(r.repack_unit, 24) self.assertEqual(r.repack_size(1), 2) self.assertEqual(r.repack_size(2), 3) self.assertEqual(r.repackable_size(1), 0) self.assertEqual(r.repackable_size(2), 1) self.assertEqual(r.repackable_size(3),", "aa') b = r.repack(a, 1, 2) self.assertEqual(b, BinData.from_spaced_hex(23, '443322 087766')) def test_padded_8to23_right_little(self): r", "2) self.assertEqual(r.repackable_size(4), 2) a = BinData.from_spaced_hex(8, '12 34 56 78 9a') b =", "1, 2) self.assertEqual(b, BinData.from_spaced_hex(23, '334455 778899')) def test_padded_8to23_right_big(self): r = Repacker(Endian.BIG, 8, 23,", "def test_gather_8to16_big(self): r = Repacker(endian=Endian.BIG, from_width=8, to_width=16) self.assertEqual(r.repack_unit, 16) self.assertEqual(r.repack_size(2), 4) self.assertEqual(r.repackable_size(2), 1)", "2, 3, 4]) b = r.repack(a, start=1, num_elements=2) self.assertEqual(b, BinData(8, [2, 3])) self.assertEqual(r.repack(a),", "24) self.assertEqual(r.repack_size(1), 2) self.assertEqual(r.repack_size(2), 3) self.assertEqual(r.repackable_size(1), 0) self.assertEqual(r.repackable_size(2), 1) self.assertEqual(r.repackable_size(3), 2) self.assertEqual(r.repackable_size(4), 2)", "66 77 88 99 aa') b = r.repack(a, 1, 2) self.assertEqual(b, BinData.from_spaced_hex(23, '334455", "BinData.from_spaced_hex(12, '412 563 a78')) def test_mash_8to12_big(self): r = Repacker(Endian.BIG, 8, 12) self.assertEqual(r.repack_unit, 24)", "6]) b = r.repack(a, start=1, num_elements=2) self.assertEqual(b, BinData.from_spaced_hex(16, '0203 0405')) c = r.repack(a,", "self.assertEqual(b, BinData.from_spaced_hex(23, '443322 087766')) def test_padded_8to23_right_little(self): r = Repacker(Endian.LITTLE, 8, 23, low_pad=9) self.assertEqual(r.repack_unit,", "[0xfedcba987654321]) b = r.repack(a) self.assertEqual(b, BinData.from_spaced_hex(20, '54321 a9876 fedcb')) def test_split_60to20_big(self): r =", "BinData.from_spaced_hex(8, '12 34 56') b = r.repack(a, 1, 12) c = BinData.from_spaced_hex(1, '", "BinData.from_spaced_hex(8, '56 78 9a')) def test_padded_8to23_left_little(self): r = Repacker(Endian.LITTLE, 8, 23, high_pad=9) self.assertEqual(r.repack_unit,", "= BinData(60, [0xfedcba987654321]) b = r.repack(a) self.assertEqual(b, 
BinData.from_spaced_hex(20, '54321 a9876 fedcb')) def test_split_60to20_big(self):", "or implied. # See the License for the specific language governing permissions and", "the License. import unittest from veles.data.bindata import BinData from veles.data.repack import Endian, Repacker", "99 aa') b = r.repack(a, 1, 2) self.assertEqual(b, BinData.from_spaced_hex(23, '334455 778899')) def test_padded_8to23_right_big(self):", "self.assertEqual(b, c) d = r.repack(a) self.assertEqual(d, BinData.from_spaced_hex(12, '123 456 789')) def test_split_8to1_little(self): r", "the specific language governing permissions and # limitations under the License. import unittest", "specific language governing permissions and # limitations under the License. import unittest from", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "r.repack(a, 1, 12) c = BinData.from_spaced_hex(1, ' '.join(format(0x634, '012b')[::-1])) self.assertEqual(b, c) def test_split_8to1_big(self):", "self.assertEqual(r.repack_size(2), 4) self.assertEqual(r.repackable_size(2), 1) self.assertEqual(r.repackable_size(3), 1) self.assertEqual(r.repackable_size(4), 2) a = BinData(8, [1, 2,", "OR CONDITIONS OF ANY KIND, either express or implied. # See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "99 aa') b = r.repack(a, 1, 2) self.assertEqual(b, BinData.from_spaced_hex(23, '1119a2 333bc4')) def test_padded_8to23_mixed_big(self):", "Repacker(Endian.LITTLE, 8, 23, low_pad=9) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8) self.assertEqual(r.repackable_size(7), 1) self.assertEqual(r.repackable_size(8), 2) a", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "from_width=8, to_width=8) self.assertEqual(r.repack_unit, 8) self.assertEqual(r.repack_size(num_elements=2), 2) self.assertEqual(r.repackable_size(from_size=2), 2) a = BinData(8, [1, 2,", "1, 2) self.assertEqual(b, BinData.from_spaced_hex(23, '554433 198877')) def test_padded_8to23_left_big(self): r = Repacker(Endian.BIG, 8, 23,", "in writing, software # distributed under the License is distributed on an \"AS", "2) self.assertEqual(b, BinData.from_spaced_hex(12, '634 785')) c = r.repack(a, 1) self.assertEqual(b, c) d =", "test_split_8to1_big(self): r = Repacker(Endian.BIG, 8, 1) self.assertEqual(r.repack_unit, 8) self.assertEqual(r.repack_size(12), 2) self.assertEqual(r.repack_size(8), 1) self.assertEqual(r.repack_size(9),", "32) self.assertEqual(r.repack_size(2), 8) self.assertEqual(r.repackable_size(7), 1) self.assertEqual(r.repackable_size(8), 2) a = BinData.from_spaced_hex(8, '11 22 33", "88 99 aa') b = r.repack(a, 1, 2) self.assertEqual(b, BinData.from_spaced_hex(23, '554433 198877')) def", "88 99 aa') b = r.repack(a, 1, 2) self.assertEqual(b, BinData.from_spaced_hex(23, '334455 778899')) def", "BinData(60, [0xfedcba987654321]) b = r.repack(a) self.assertEqual(b, BinData.from_spaced_hex(20, 'fedcb a9876 54321')) def test_split_16to8_little(self): r", "88 99 aa') b = r.repack(a, 1, 2) self.assertEqual(b, BinData.from_spaced_hex(23, '2aa219 4cc43b')) def", "c) d = r.repack(a) self.assertEqual(d, BinData.from_spaced_hex(16, '0201 0403 0605')) def test_gather_8to16_big(self): r =", "= Repacker(Endian.BIG, 16, 8) self.assertEqual(r.repack_unit, 16) self.assertEqual(r.repack_size(3), 2) self.assertEqual(r.repackable_size(3), 6) a = BinData(16,", "BinData(8, [1, 2, 3, 4]) b = r.repack(a, start=1, num_elements=2) 
self.assertEqual(b, BinData(8, [2,", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "0304 0506')) def test_mash_8to12_little(self): r = Repacker(Endian.LITTLE, 8, 12) self.assertEqual(r.repack_unit, 24) self.assertEqual(r.repack_size(1), 2)", "self.assertEqual(r.repack_unit, 16) self.assertEqual(r.repack_size(3), 2) self.assertEqual(r.repackable_size(3), 6) a = BinData(16, [0x1234, 0x5678, 0x9abc]) b", "BinData.from_spaced_hex(20, 'fedcb a9876 54321')) def test_split_16to8_little(self): r = Repacker(Endian.LITTLE, 16, 8) self.assertEqual(r.repack_unit, 16)", "d = r.repack(a) self.assertEqual(d, BinData.from_spaced_hex(16, '0102 0304 0506')) def test_mash_8to12_little(self): r = Repacker(Endian.LITTLE,", "9a') b = r.repack(a, 1, 2) self.assertEqual(b, BinData.from_spaced_hex(12, '345 678')) c = r.repack(a,", "Repacker(Endian.BIG, 8, 23, low_pad=9) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8) self.assertEqual(r.repackable_size(7), 1) self.assertEqual(r.repackable_size(8), 2) a", "8) self.assertEqual(r.repack_size(num_elements=2), 2) self.assertEqual(r.repackable_size(from_size=2), 2) a = BinData(8, [1, 2, 3, 4]) b", "self.assertEqual(b, c) d = r.repack(a) self.assertEqual(d, BinData.from_spaced_hex(16, '0201 0403 0605')) def test_gather_8to16_big(self): r", "678')) c = r.repack(a, 1) self.assertEqual(b, c) d = r.repack(a) self.assertEqual(d, BinData.from_spaced_hex(12, '123", "and # limitations under the License. import unittest from veles.data.bindata import BinData from", "3) self.assertEqual(r.repackable_size(1), 0) self.assertEqual(r.repackable_size(2), 1) self.assertEqual(r.repackable_size(3), 2) self.assertEqual(r.repackable_size(4), 2) a = BinData.from_spaced_hex(8, '12", "r.repack(a, 1, 2) self.assertEqual(b, BinData.from_spaced_hex(23, '2aa219 4cc43b')) def test_padded_8to23_mixed_little(self): r = Repacker(Endian.LITTLE, 8,", "import Endian, Repacker class TestRepacker(unittest.TestCase): def test_endian(self): self.assertNotEqual(Endian.LITTLE, Endian.BIG) def test_simple_copy(self): r =", "Repacker(Endian.LITTLE, 8, 1) self.assertEqual(r.repack_unit, 8) self.assertEqual(r.repack_size(12), 2) self.assertEqual(r.repack_size(8), 1) self.assertEqual(r.repack_size(9), 2) self.assertEqual(r.repack_size(17), 3)", "self.assertEqual(r.repack_unit, 24) self.assertEqual(r.repack_size(1), 2) self.assertEqual(r.repack_size(2), 3) self.assertEqual(r.repackable_size(1), 0) self.assertEqual(r.repackable_size(2), 1) self.assertEqual(r.repackable_size(3), 2) self.assertEqual(r.repackable_size(4),", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "you may not use this file except in compliance with the License. 
#", "self.assertEqual(b, BinData.from_spaced_hex(16, '0203 0405')) c = r.repack(a, start=1) self.assertEqual(b, c) d = r.repack(a)", "r.repack(a) self.assertEqual(b, BinData.from_spaced_hex(20, '54321 a9876 fedcb')) def test_split_60to20_big(self): r = Repacker(Endian.BIG, 60, 20)", "= r.repack(a) self.assertEqual(b, BinData.from_spaced_hex(20, 'fedcb a9876 54321')) def test_split_16to8_little(self): r = Repacker(Endian.LITTLE, 16,", "56') b = r.repack(a, 1, 12) c = BinData.from_spaced_hex(1, ' '.join(format(0x345, '012b'))) self.assertEqual(b,", "fedcb')) def test_split_60to20_big(self): r = Repacker(Endian.BIG, 60, 20) self.assertEqual(r.repack_unit, 60) self.assertEqual(r.repack_size(1), 1) self.assertEqual(r.repack_size(2),", "b = r.repack(a) self.assertEqual(b, BinData.from_spaced_hex(20, '54321 a9876 fedcb')) def test_split_60to20_big(self): r = Repacker(Endian.BIG,", "2) self.assertEqual(r.repackable_size(1), 3) a = BinData(60, [0xfedcba987654321]) b = r.repack(a) self.assertEqual(b, BinData.from_spaced_hex(20, '54321", "self.assertEqual(b, c) def test_split_8to1_big(self): r = Repacker(Endian.BIG, 8, 1) self.assertEqual(r.repack_unit, 8) self.assertEqual(r.repack_size(12), 2)", "0506')) def test_mash_8to12_little(self): r = Repacker(Endian.LITTLE, 8, 12) self.assertEqual(r.repack_unit, 24) self.assertEqual(r.repack_size(1), 2) self.assertEqual(r.repack_size(2),", "23, low_pad=8, high_pad=1) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8) self.assertEqual(r.repackable_size(7), 1) self.assertEqual(r.repackable_size(8), 2) a =", "c = BinData.from_spaced_hex(1, ' '.join(format(0x634, '012b')[::-1])) self.assertEqual(b, c) def test_split_8to1_big(self): r = Repacker(Endian.BIG,", "BinData(60, [0xfedcba987654321]) b = r.repack(a) self.assertEqual(b, BinData.from_spaced_hex(20, '54321 a9876 fedcb')) def test_split_60to20_big(self): r", "self.assertEqual(d, BinData.from_spaced_hex(12, '123 456 789')) def test_split_8to1_little(self): r = Repacker(Endian.LITTLE, 8, 1) self.assertEqual(r.repack_unit,", "= BinData(60, [0xfedcba987654321]) b = r.repack(a) self.assertEqual(b, BinData.from_spaced_hex(20, 'fedcb a9876 54321')) def test_split_16to8_little(self):", "use this file except in compliance with the License. 
# You may obtain", "test_split_16to8_little(self): r = Repacker(Endian.LITTLE, 16, 8) self.assertEqual(r.repack_unit, 16) self.assertEqual(r.repack_size(3), 2) self.assertEqual(r.repackable_size(3), 6) a", "087766')) def test_padded_8to23_right_little(self): r = Repacker(Endian.LITTLE, 8, 23, low_pad=9) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8)", "start=1) self.assertEqual(b, c) d = r.repack(a) self.assertEqual(d, BinData.from_spaced_hex(16, '0102 0304 0506')) def test_mash_8to12_little(self):", "= r.repack(a, start=1, num_elements=2) self.assertEqual(b, BinData.from_spaced_hex(16, '0302 0504')) c = r.repack(a, start=1) self.assertEqual(b,", "1, 2) self.assertEqual(b, BinData.from_spaced_hex(12, '345 678')) c = r.repack(a, 1) self.assertEqual(b, c) d", "veles.data.bindata import BinData from veles.data.repack import Endian, Repacker class TestRepacker(unittest.TestCase): def test_endian(self): self.assertNotEqual(Endian.LITTLE,", "0) self.assertEqual(r.repackable_size(2), 1) self.assertEqual(r.repackable_size(3), 2) self.assertEqual(r.repackable_size(4), 2) a = BinData.from_spaced_hex(8, '12 34 56", "1, 12) c = BinData.from_spaced_hex(1, ' '.join(format(0x634, '012b')[::-1])) self.assertEqual(b, c) def test_split_8to1_big(self): r", "self.assertEqual(r.repackable_size(7), 1) self.assertEqual(r.repackable_size(8), 2) a = BinData.from_spaced_hex(8, '11 22 33 44 55 66", "self.assertEqual(r.repackable_size(8), 2) a = BinData.from_spaced_hex(8, '11 22 33 44 55 66 77 88", "= Repacker(Endian.LITTLE, 60, 20) self.assertEqual(r.repack_unit, 60) self.assertEqual(r.repack_size(1), 1) self.assertEqual(r.repack_size(2), 1) self.assertEqual(r.repack_size(3), 1) self.assertEqual(r.repack_size(4),", "test_padded_8to23_left_big(self): r = Repacker(Endian.BIG, 8, 23, high_pad=9) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8) self.assertEqual(r.repackable_size(7), 1)", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "self.assertEqual(r.repack_unit, 8) self.assertEqual(r.repack_size(12), 2) self.assertEqual(r.repack_size(8), 1) self.assertEqual(r.repack_size(9), 2) self.assertEqual(r.repack_size(17), 3) self.assertEqual(r.repackable_size(1), 8) a", "self.assertEqual(b, c) d = r.repack(a) self.assertEqual(d, BinData.from_spaced_hex(12, '412 563 a78')) def test_mash_8to12_big(self): r", "Copyright 2017 CodiLime # # Licensed under the Apache License, Version 2.0 (the", "self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8) self.assertEqual(r.repackable_size(7), 1) self.assertEqual(r.repackable_size(8), 2) a = BinData.from_spaced_hex(8, '11 22", "56 bc')) def test_split_16to8_big(self): r = Repacker(Endian.BIG, 16, 8) self.assertEqual(r.repack_unit, 16) self.assertEqual(r.repack_size(3), 2)", "= r.repack(a, 1, 2) self.assertEqual(b, BinData.from_spaced_hex(23, '554433 198877')) def test_padded_8to23_left_big(self): r = Repacker(Endian.BIG,", "BinData.from_spaced_hex(23, '443322 087766')) def test_padded_8to23_right_little(self): r = Repacker(Endian.LITTLE, 8, 23, low_pad=9) self.assertEqual(r.repack_unit, 32)", "Repacker(Endian.LITTLE, 8, 23, low_pad=8, high_pad=1) self.assertEqual(r.repack_unit, 32) self.assertEqual(r.repack_size(2), 8) self.assertEqual(r.repackable_size(7), 1) self.assertEqual(r.repackable_size(8), 2)", "2.0 (the \"License\"); # you may not use this file except in compliance", "BinData.from_spaced_hex(8, '78 56 bc')) def test_split_16to8_big(self): r = Repacker(Endian.BIG, 16, 8) 
# Copyright 2017 CodiLime
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

from veles.data.bindata import BinData
from veles.data.repack import Endian, Repacker


class TestRepacker(unittest.TestCase):
    def test_endian(self):
        self.assertNotEqual(Endian.LITTLE, Endian.BIG)

    def test_simple_copy(self):
        r = Repacker(endian=Endian.LITTLE, from_width=8, to_width=8)
        self.assertEqual(r.repack_unit, 8)
        self.assertEqual(r.repack_size(num_elements=2), 2)
        self.assertEqual(r.repackable_size(from_size=2), 2)
        a = BinData(8, [1, 2, 3, 4])
        b = r.repack(a, start=1, num_elements=2)
        self.assertEqual(b, BinData(8, [2, 3]))
        self.assertEqual(r.repack(a), a)

    def test_gather_8to16_little(self):
        r = Repacker(endian=Endian.LITTLE, from_width=8, to_width=16)
        self.assertEqual(r.repack_unit, 16)
        self.assertEqual(r.repack_size(2), 4)
        self.assertEqual(r.repackable_size(2), 1)
        self.assertEqual(r.repackable_size(3), 1)
        self.assertEqual(r.repackable_size(4), 2)
        a = BinData(8, [1, 2, 3, 4, 5, 6])
        b = r.repack(a, start=1, num_elements=2)
        self.assertEqual(b, BinData.from_spaced_hex(16, '0302 0504'))
        c = r.repack(a, start=1)
        self.assertEqual(b, c)
        d = r.repack(a)
        self.assertEqual(d, BinData.from_spaced_hex(16, '0201 0403 0605'))

    def test_gather_8to16_big(self):
        r = Repacker(endian=Endian.BIG, from_width=8, to_width=16)
        self.assertEqual(r.repack_unit, 16)
        self.assertEqual(r.repack_size(2), 4)
        self.assertEqual(r.repackable_size(2), 1)
        self.assertEqual(r.repackable_size(3), 1)
        self.assertEqual(r.repackable_size(4), 2)
        a = BinData(8, [1, 2, 3, 4, 5, 6])
        b = r.repack(a, start=1, num_elements=2)
        self.assertEqual(b, BinData.from_spaced_hex(16, '0203 0405'))
        c = r.repack(a, start=1)
        self.assertEqual(b, c)
        d = r.repack(a)
        self.assertEqual(d, BinData.from_spaced_hex(16, '0102 0304 0506'))

    def test_mash_8to12_little(self):
        r = Repacker(Endian.LITTLE, 8, 12)
        self.assertEqual(r.repack_unit, 24)
        self.assertEqual(r.repack_size(1), 2)
        self.assertEqual(r.repack_size(2), 3)
        self.assertEqual(r.repackable_size(1), 0)
        self.assertEqual(r.repackable_size(2), 1)
        self.assertEqual(r.repackable_size(3), 2)
        self.assertEqual(r.repackable_size(4), 2)
        a = BinData.from_spaced_hex(8, '12 34 56 78 9a')
        b = r.repack(a, 1, 2)
        self.assertEqual(b, BinData.from_spaced_hex(12, '634 785'))
        c = r.repack(a, 1)
        self.assertEqual(b, c)
        d = r.repack(a)
        self.assertEqual(d, BinData.from_spaced_hex(12, '412 563 a78'))

    def test_mash_8to12_big(self):
        r = Repacker(Endian.BIG, 8, 12)
        self.assertEqual(r.repack_unit, 24)
        self.assertEqual(r.repack_size(1), 2)
        self.assertEqual(r.repack_size(2), 3)
        self.assertEqual(r.repackable_size(1), 0)
        self.assertEqual(r.repackable_size(2), 1)
        self.assertEqual(r.repackable_size(3), 2)
        self.assertEqual(r.repackable_size(4), 2)
        a = BinData.from_spaced_hex(8, '12 34 56 78 9a')
        b = r.repack(a, 1, 2)
        self.assertEqual(b, BinData.from_spaced_hex(12, '345 678'))
        c = r.repack(a, 1)
        self.assertEqual(b, c)
        d = r.repack(a)
        self.assertEqual(d, BinData.from_spaced_hex(12, '123 456 789'))

    def test_split_8to1_little(self):
        r = Repacker(Endian.LITTLE, 8, 1)
        self.assertEqual(r.repack_unit, 8)
        self.assertEqual(r.repack_size(12), 2)
        self.assertEqual(r.repack_size(8), 1)
        self.assertEqual(r.repack_size(9), 2)
        self.assertEqual(r.repack_size(17), 3)
        self.assertEqual(r.repackable_size(1), 8)
        a = BinData.from_spaced_hex(8, '12 34 56')
        b = r.repack(a, 1, 12)
        c = BinData.from_spaced_hex(1, ' '.join(format(0x634, '012b')[::-1]))
        self.assertEqual(b, c)

    def test_split_8to1_big(self):
        r = Repacker(Endian.BIG, 8, 1)
        self.assertEqual(r.repack_unit, 8)
        self.assertEqual(r.repack_size(12), 2)
        self.assertEqual(r.repack_size(8), 1)
        self.assertEqual(r.repack_size(9), 2)
        self.assertEqual(r.repack_size(17), 3)
        self.assertEqual(r.repackable_size(1), 8)
        a = BinData.from_spaced_hex(8, '12 34 56')
        b = r.repack(a, 1, 12)
        c = BinData.from_spaced_hex(1, ' '.join(format(0x345, '012b')))
        self.assertEqual(b, c)

    def test_split_60to20_little(self):
        r = Repacker(Endian.LITTLE, 60, 20)
        self.assertEqual(r.repack_unit, 60)
        self.assertEqual(r.repack_size(1), 1)
        self.assertEqual(r.repack_size(2), 1)
        self.assertEqual(r.repack_size(3), 1)
        self.assertEqual(r.repack_size(4), 2)
        self.assertEqual(r.repackable_size(1), 3)
        a = BinData(60, [0xfedcba987654321])
        b = r.repack(a)
        self.assertEqual(b, BinData.from_spaced_hex(20, '54321 a9876 fedcb'))

    def test_split_60to20_big(self):
        r = Repacker(Endian.BIG, 60, 20)
        self.assertEqual(r.repack_unit, 60)
        self.assertEqual(r.repack_size(1), 1)
        self.assertEqual(r.repack_size(2), 1)
        self.assertEqual(r.repack_size(3), 1)
        self.assertEqual(r.repack_size(4), 2)
        self.assertEqual(r.repackable_size(1), 3)
        a = BinData(60, [0xfedcba987654321])
        b = r.repack(a)
        self.assertEqual(b, BinData.from_spaced_hex(20, 'fedcb a9876 54321'))

    def test_split_16to8_little(self):
        r = Repacker(Endian.LITTLE, 16, 8)
        self.assertEqual(r.repack_unit, 16)
        self.assertEqual(r.repack_size(3), 2)
        self.assertEqual(r.repackable_size(3), 6)
        a = BinData(16, [0x1234, 0x5678, 0x9abc])
        b = r.repack(a, 1, 3)
        self.assertEqual(b, BinData.from_spaced_hex(8, '78 56 bc'))

    def test_split_16to8_big(self):
        r = Repacker(Endian.BIG, 16, 8)
        self.assertEqual(r.repack_unit, 16)
        self.assertEqual(r.repack_size(3), 2)
        self.assertEqual(r.repackable_size(3), 6)
        a = BinData(16, [0x1234, 0x5678, 0x9abc])
        b = r.repack(a, 1, 3)
        self.assertEqual(b, BinData.from_spaced_hex(8, '56 78 9a'))

    def test_padded_8to23_left_little(self):
        r = Repacker(Endian.LITTLE, 8, 23, high_pad=9)
        self.assertEqual(r.repack_unit, 32)
        self.assertEqual(r.repack_size(2), 8)
        self.assertEqual(r.repackable_size(7), 1)
        self.assertEqual(r.repackable_size(8), 2)
        a = BinData.from_spaced_hex(8, '11 22 33 44 55 66 77 88 99 aa')
        b = r.repack(a, 1, 2)
        self.assertEqual(b, BinData.from_spaced_hex(23, '443322 087766'))

    def test_padded_8to23_right_little(self):
        r = Repacker(Endian.LITTLE, 8, 23, low_pad=9)
        self.assertEqual(r.repack_unit, 32)
        self.assertEqual(r.repack_size(2), 8)
        self.assertEqual(r.repackable_size(7), 1)
        self.assertEqual(r.repackable_size(8), 2)
        a = BinData.from_spaced_hex(8, '11 22 33 44 55 66 77 88 99 aa')
        b = r.repack(a, 1, 2)
        self.assertEqual(b, BinData.from_spaced_hex(23, '2aa219 4cc43b'))

    def test_padded_8to23_mixed_little(self):
        r = Repacker(Endian.LITTLE, 8, 23, low_pad=8, high_pad=1)
        self.assertEqual(r.repack_unit, 32)
        self.assertEqual(r.repack_size(2), 8)
        self.assertEqual(r.repackable_size(7), 1)
        self.assertEqual(r.repackable_size(8), 2)
        a = BinData.from_spaced_hex(8, '11 22 33 44 55 66 77 88 99 aa')
        b = r.repack(a, 1, 2)
        self.assertEqual(b, BinData.from_spaced_hex(23, '554433 198877'))

    def test_padded_8to23_left_big(self):
        r = Repacker(Endian.BIG, 8, 23, high_pad=9)
        self.assertEqual(r.repack_unit, 32)
        self.assertEqual(r.repack_size(2), 8)
        self.assertEqual(r.repackable_size(7), 1)
        self.assertEqual(r.repackable_size(8), 2)
        a = BinData.from_spaced_hex(8, '11 22 33 44 55 66 77 88 99 aa')
        b = r.repack(a, 1, 2)
        self.assertEqual(b, BinData.from_spaced_hex(23, '334455 778899'))

    def test_padded_8to23_right_big(self):
        r = Repacker(Endian.BIG, 8, 23, low_pad=9)
        self.assertEqual(r.repack_unit, 32)
        self.assertEqual(r.repack_size(2), 8)
        self.assertEqual(r.repackable_size(7), 1)
        self.assertEqual(r.repackable_size(8), 2)
        a = BinData.from_spaced_hex(8, '11 22 33 44 55 66 77 88 99 aa')
        b = r.repack(a, 1, 2)
        self.assertEqual(b, BinData.from_spaced_hex(23, '1119a2 333bc4'))

    def test_padded_8to23_mixed_big(self):
        r = Repacker(Endian.BIG, 8, 23, low_pad=8, high_pad=1)
        self.assertEqual(r.repack_unit, 32)
        self.assertEqual(r.repack_size(2), 8)
        self.assertEqual(r.repackable_size(7), 1)
        self.assertEqual(r.repackable_size(8), 2)
        a = BinData.from_spaced_hex(8, '11 22 33 44 55 66 77 88 99 aa')
        b = r.repack(a, 1, 2)
        self.assertEqual(b, BinData.from_spaced_hex(23, '223344 667788'))
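
# The "mash" expected values above can be reproduced by hand: 8-bit source
# elements are grouped into a 24-bit repack unit (lcm(8, 12) = 24, matching
# repack_unit in those tests), the unit is assembled according to the chosen
# endianness, and 12-bit destination elements are read out of it.  The sketch
# below only illustrates that arithmetic; it is not the veles Repacker
# implementation.
def mash_8to12_little(data, start, num_elements):
    """Repack a list of bytes into 12-bit values, little-endian grouping."""
    out = []
    pos = start
    while len(out) < num_elements:
        unit = int.from_bytes(bytes(data[pos:pos + 3]), 'little')  # 24-bit unit
        out.append(unit & 0xfff)          # low 12 bits -> first element
        out.append((unit >> 12) & 0xfff)  # next 12 bits -> second element
        pos += 3
    return out[:num_elements]


# Matches test_mash_8to12_little: repack(a, 1, 2) == from_spaced_hex(12, '634 785')
assert mash_8to12_little([0x12, 0x34, 0x56, 0x78, 0x9a], 1, 2) == [0x634, 0x785]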
from secml.array import CArray
from secml.figure import CFigure

fig = CFigure(fontsize=14)
fig.title('loglog base 4 on x')

t = CArray.arange(0.01, 20.0, 0.01)
fig.sp.loglog(t, 20 * (-t / 10.0).exp(), basex=2)

fig.sp.grid()
fig.show()
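
# Rough plain-matplotlib sketch of the CFigure snippet above, included for
# comparison only.  The correspondence (CArray -> numpy array, fig.sp -> a
# matplotlib Axes, basex -> the x-axis log base) is an assumption, not taken
# from the secml documentation; the `base` keyword of set_xscale requires
# matplotlib >= 3.3.
import numpy as np
import matplotlib.pyplot as plt

t = np.arange(0.01, 20.0, 0.01)
fig, ax = plt.subplots()
ax.set_title('loglog base 4 on x')
ax.plot(t, 20 * np.exp(-t / 10.0))
ax.set_xscale('log', base=2)
ax.set_yscale('log')
ax.grid()
plt.show()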
# -*- mode: python; coding: utf-8 -*-
# Copyright (c) 2019 Radio Astronomy Software Group
# Licensed under the 3-clause BSD License
"""Methods for doing spherical coordinate transformations on vectors."""
import numpy as np


def r_hat(theta, phi):
    """
    Get the r hat unit vectors in cartesian coordinates for points on a sphere.

    Parameters
    ----------
    theta, phi : float
        The co-latitude and azimuth coordinates, respectively, for a point
        on the sphere, in radians. Azimuth is defined with respect to the
        x axis, co-latitude is the angle with the positive z axis.

    Returns
    -------
    array of float
        Array of r hat vectors, shape (3, Npoints)
    """
    theta = np.array(theta)
    phi = np.array(phi)
    if theta.shape != phi.shape:
        raise ValueError("theta and phi must have the same shape")
    rhx = np.cos(phi) * np.sin(theta)
    rhy = np.sin(phi) * np.sin(theta)
    rhz = np.cos(theta)
    return np.stack((rhx, rhy, rhz))


def theta_hat(theta, phi):
    """
    Get the theta hat unit vectors in cartesian coordinates for points on a sphere.

    Parameters
    ----------
    theta, phi : float
        The co-latitude and azimuth coordinates, respectively, for a point
        on the sphere, in radians. Azimuth is defined with respect to the
        x axis, co-latitude is the angle with the positive z axis.

    Returns
    -------
    array of float
        Array of theta hat vectors, shape (3, Npoints)
    """
    theta = np.array(theta)
    phi = np.array(phi)
    if theta.shape != phi.shape:
        raise ValueError("theta and phi must have the same shape")
    thx = np.cos(phi) * np.cos(theta)
    thy = np.sin(phi) * np.cos(theta)
    thz = -np.sin(theta)
    return np.stack((thx, thy, thz))


def phi_hat(theta, phi):
    """
    Get the phi hat unit vectors in cartesian coordinates for points on a sphere.

    Parameters
    ----------
    theta, phi : float
        The co-latitude and azimuth coordinates, respectively, for a point
        on the sphere, in radians. Azimuth is defined with respect to the
        x axis, co-latitude is the angle with the positive z axis.

    Returns
    -------
    array of float
        Array of phi hat vectors, shape (3, Npoints)
    """
    theta = np.array(theta)
    phi = np.array(phi)
    if theta.shape != phi.shape:
        raise ValueError("theta and phi must have the same shape")
    phx = -np.sin(phi)
    phy = np.cos(phi)
    phz = np.zeros_like(phi)
    return np.stack((phx, phy, phz))
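
# Illustrative check, not part of the original module: at any point on the
# sphere the three unit vectors above form a right-handed orthonormal triad,
# with theta_hat x phi_hat = r_hat.  The angles are arbitrary test values.
_th, _ph = 0.7, 1.9
assert np.allclose(np.cross(theta_hat(_th, _ph), phi_hat(_th, _ph)), r_hat(_th, _ph))
assert np.isclose(np.dot(r_hat(_th, _ph), theta_hat(_th, _ph)), 0.0)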
def rotate_points_3d(rot_matrix, theta, phi):
    """
    Get the spherical coordinates of the point under a 3d rotation.

    Finds the spherical coordinates for point p specified by p = R . q,
    where q is the 3D position vector of the point specified by (theta, phi)
    and R is the 3D rotation matrix that relates two coordinate charts.
    The accuracy of this method may not be good enough near poles in either
    coordinate system.

    Parameters
    ----------
    rot_matrix : array-like of float
        rotation matrix to use
    theta, phi : float
        The co-latitude and azimuth coordinates, respectively, for a point
        on the sphere, in radians. Azimuth is defined with respect to the
        x axis, co-latitude is the angle with the positive z axis.

    Returns
    -------
    beta, alpha : float
        The theta, phi coordinates for the point on the sphere (using normal
        mathematical conventions) in the rotated frame.
    """
    # This is NOT written to be vectorized for multiple (theta, phi)
    rot_matrix = np.array(rot_matrix)
    if rot_matrix.shape != (3, 3):
        raise ValueError("rot_matrix must be a 3x3 array")

    # Replace with function call?
    q_hat_1 = np.cos(phi) * np.sin(theta)
    q_hat_2 = np.sin(phi) * np.sin(theta)
    q_hat_3 = np.cos(theta)
    q_hat = np.stack((q_hat_1, q_hat_2, q_hat_3))

    # Should test for shape of p_hat
    p_hat = np.einsum("ab...,b...->a...", rot_matrix, q_hat)

    # Should write a function to do this as well, i.e., pull back angles from
    # a vector
    if np.isclose(p_hat[2], 1.0, rtol=0.0, atol=1e-12):
        p_hat[2] = 1.0
    beta = np.arccos(p_hat[2])
    alpha = np.arctan2(p_hat[1], p_hat[0])
    if alpha < 0:
        alpha += 2.0 * np.pi

    return (beta, alpha)


def spherical_basis_vector_rotation_matrix(
    theta, phi, rot_matrix, beta=None, alpha=None
):
    """
    Get the rotation matrix to take vectors in the theta/phi basis to a new
    reference frame.

    Given a position (`theta`, `phi`) in "standard mathematical" coordinates
    (0 < `theta` < pi, 0 < `phi` < 2 pi) which will typically be an ICRS
    RA/Dec coordinate appropriately converted, and the point to which it is
    transformed in another standard mathematical coordinate system
    (`beta`, `alpha`), which will typically be local telescope Alt/Az
    appropriately converted, and a 3 x 3 rotation matrix `rot_matrix` which
    connects those two points, calculate the rotation matrix which rotates
    the basis vectors associated with (`theta`, `phi`) to those associated
    with (`beta`, `alpha`).

    Parameters
    ----------
    theta, phi : float
        The co-latitude and azimuth coordinates, respectively, for a point
        on the sphere, in radians. Azimuth is defined with respect to the
        x axis, co-latitude is the angle with the positive z axis.
    rot_matrix : array-like of float
        Rotation matrix that takes 3-vectors from (theta, phi) to (beta, alpha)
    beta, alpha : float, optional
        The theta, phi coordinates for the point on the sphere (using normal
        mathematical conventions) in the rotated frame. If either is not
        provided, they are calculated using `rotate_points_3d`. Note these
        may not be as exact as values calculated from astropy.

    Returns
    -------
    array of float
        2 x 2 rotation matrix that takes vectors in the theta/phi basis to
        the beta/alpha basis.
    """
    if alpha is None or beta is None:
        beta, alpha = rotate_points_3d(rot_matrix, theta, phi)

    th = theta_hat(theta, phi)
    ph = phi_hat(theta, phi)

    bh = np.einsum("ab...,b...->a...", rot_matrix.T, theta_hat(beta, alpha))

    cosX = np.einsum("a...,a...", bh, th)
    sinX = np.einsum("a...,a...", bh, ph)

    return np.array([[cosX, sinX], [-sinX, cosX]])
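
# Usage sketch, not part of the original module: a rotation about the z axis
# should leave the co-latitude unchanged and shift the azimuth by the rotation
# angle.  The angles are arbitrary test values chosen so no wrapping occurs.
_gamma = 0.5
_rz = np.array(
    [[np.cos(_gamma), -np.sin(_gamma), 0.0],
     [np.sin(_gamma), np.cos(_gamma), 0.0],
     [0.0, 0.0, 1.0]]
)
_beta, _alpha = rotate_points_3d(_rz, 0.7, 1.9)
assert np.isclose(_beta, 0.7) and np.isclose(_alpha, 1.9 + _gamma)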
rot_matrix : array-like of float Rotation matrix that takes 3-vectors", "q, where q is the 3D position vector of the point specified by", "to (beta, alpha) beta, alpha : float, optional The theta, phi coordinates for", "phi) bh = np.einsum(\"ab...,b...->a...\", rot_matrix.T, theta_hat(beta, alpha)) cosX = np.einsum(\"a...,a...\", bh, th) sinX", "matrix orthogonality. Parameters ---------- matrix : array-like of float square matrix to test", "rtol=0.0, atol=1e-12): p_hat[2] = 1.0 beta = np.arccos(p_hat[2]) alpha = np.arctan2(p_hat[1], p_hat[0]) if", "or vector into the other. \"\"\" if r1 is None or r2 is", "co-latitude is the angle with the positive z axis. rot_matrix : array-like of", "the theta hat unit vectors in cartesian coordinates for points on a sphere.", "vector, False otherwise. \"\"\" return np.allclose(np.dot(vec, vec), 1, atol=tol) def vecs2rot(r1=None, r2=None, theta1=None,", "a unit vector: \" + str(r1) assert is_unit_vector(r2), \"r2 is not a unit", "is None or r2 is None: if theta1 is None or phi1 is", "`alpha`), which will typically be local telescope Alt/Az appropriately converted, and a 3", "!= (3,) or r2.shape != (3,): raise ValueError(\"r1 and r2 must be length", "Array of r hat vectors, shape (3, Npoints) \"\"\" theta = np.array(theta) phi", "of float Rotation matrix that takes 3-vectors from (theta, phi) to (beta, alpha)", "are supplied. Returns ------- array 3x3 rotation matrix that rotates the first point", "will typically be an ICRS RA/Dec coordinate appropriately converted, and the point to", "axis to rotate around. angle : float angle to rotate by in radians", "vectorized for multiple (theta, phi) rot_matrix = np.array(rot_matrix) if rot_matrix.shape != (3, 3):", "ValueError(\"axis must be a unit vector\") K_matrix = np.array( [[0.0, -axis[2], axis[1]], [axis[2],", "or theta2 is None or phi2 is None: raise ValueError( \"Either r1 and", "spherical coordinates for point p specified by p = R . q, where", "to which it is transformed in another standard mathematical coordinate system (`beta`, `alpha`),", "hat vectors, shape (3, Npoints) \"\"\" theta = np.array(theta) phi = np.array(phi) if", "Returns ------- array of float Array of r hat vectors, shape (3, Npoints)", "well, i.e., pull back angles from # a vector if np.isclose(p_hat[2], 1.0, rtol=0.0,", "from (theta, phi) to (beta, alpha) beta, alpha : float, optional The theta,", "\"\"\" Get the spherical coordinates of the point under a 3d rotation. Finds", "# This is NOT written to be vectorized for multiple (theta, phi) rot_matrix", "transformations on vectors.\"\"\" import numpy as np def r_hat(theta, phi): \"\"\" Get the", "as np def r_hat(theta, phi): \"\"\" Get the r hat unit vectors in", "system (`beta`, `alpha`), which will typically be local telescope Alt/Az appropriately converted, and", "x axis, co-latitude is the angle with the positive z axis. Returns -------", "np.array([[cosX, sinX], [-sinX, cosX]]) def axis_angle_rotation_matrix(axis, angle): \"\"\" Get the rotation matrix using", "ValueError(\"axis must be a must be length 3 vector\") if not is_unit_vector(axis): raise", "!= (3, 3): raise ValueError(\"rot_matrix must be a 3x3 array\") # Replace with", "z axis. 
def theta_hat(theta, phi):
    """
    Get the theta hat unit vectors in cartesian coordinates for points on a sphere.

    Parameters
    ----------
    theta, phi : float
        The co-latitude and azimuth coordinates, respectively, for a point
        on the sphere, in radians. Azimuth is defined with respect to the
        x axis, co-latitude is the angle with the positive z axis.

    Returns
    -------
    array of float
        Array of theta hat vectors, shape (3, Npoints)

    """
    theta = np.array(theta)
    phi = np.array(phi)
    if theta.shape != phi.shape:
        raise ValueError("theta and phi must have the same shape")
    thx = np.cos(phi) * np.cos(theta)
    thy = np.sin(phi) * np.cos(theta)
    thz = -np.sin(theta)
    return np.stack((thx, thy, thz))

def phi_hat(theta, phi):
    """
    Get the phi hat unit vectors in cartesian coordinates for points on a sphere.

    Parameters
    ----------
    theta, phi : float
        The co-latitude and azimuth coordinates, respectively, for a point
        on the sphere, in radians. Azimuth is defined with respect to the
        x axis, co-latitude is the angle with the positive z axis.

    Returns
    -------
    array of float
        Array of phi hat vectors, shape (3, Npoints)

    """
    theta = np.array(theta)
    phi = np.array(phi)
    if theta.shape != phi.shape:
        raise ValueError("theta and phi must have the same shape")
    phx = -np.sin(phi)
    phy = np.cos(phi)
    phz = np.zeros_like(phi)
    return np.stack((phx, phy, phz))

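# Illustrative sketch (arbitrary sample angles, not part of the original module):
# at any point on the sphere the (r_hat, theta_hat, phi_hat) vectors form a
# right-handed orthonormal triad.
def _example_basis_triad():
    theta, phi = 0.7, 1.3
    rh = r_hat(theta, phi)
    th = theta_hat(theta, phi)
    ph = phi_hat(theta, phi)
    assert np.isclose(np.dot(rh, th), 0.0)
    assert np.isclose(np.dot(rh, ph), 0.0)
    assert np.isclose(np.dot(th, ph), 0.0)
    assert np.allclose(np.cross(th, ph), rh)
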
def rotate_points_3d(rot_matrix, theta, phi):
    """
    Get the spherical coordinates of the point under a 3d rotation.

    Finds the spherical coordinates for point p specified by p = R . q,
    where q is the 3D position vector of the point specified by (theta, phi)
    and R is the 3D rotation matrix that relates two coordinate charts.
    The accuracy of this method may not be good enough near the poles in
    either coordinate system.

    Parameters
    ----------
    rot_matrix : array-like of float
        rotation matrix to use
    theta, phi : float
        The co-latitude and azimuth coordinates, respectively, for a point
        on the sphere, in radians. Azimuth is defined with respect to the
        x axis, co-latitude is the angle with the positive z axis.

    Returns
    -------
    beta, alpha : float
        The theta, phi coordinates for the point on the sphere (using normal
        mathematical conventions) in the rotated frame.

    """
    # This is NOT written to be vectorized for multiple (theta, phi)
    rot_matrix = np.array(rot_matrix)
    if rot_matrix.shape != (3, 3):
        raise ValueError("rot_matrix must be a 3x3 array")

    # Replace with function call?
    q_hat_1 = np.cos(phi) * np.sin(theta)
    q_hat_2 = np.sin(phi) * np.sin(theta)
    q_hat_3 = np.cos(theta)
    q_hat = np.stack((q_hat_1, q_hat_2, q_hat_3))

    # Should test for shape of p_hat
    p_hat = np.einsum("ab...,b...->a...", rot_matrix, q_hat)

    # Should write a function to do this as well, i.e., pull back angles from
    # a vector
    if np.isclose(p_hat[2], 1.0, rtol=0.0, atol=1e-12):
        p_hat[2] = 1.0
    beta = np.arccos(p_hat[2])
    alpha = np.arctan2(p_hat[1], p_hat[0])
    if alpha < 0:
        alpha += 2.0 * np.pi

    return (beta, alpha)

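# Illustrative sketch (hypothetical values, not part of the original module):
# rotating the point on the +x axis (theta = pi/2, phi = 0) by 90 degrees
# about z should move it to the +y axis (beta = pi/2, alpha = pi/2).
def _example_rotate_points_3d():
    rot_z_90 = np.array([[0.0, -1.0, 0.0], [1.0, 0.0, 0.0], [0.0, 0.0, 1.0]])
    beta, alpha = rotate_points_3d(rot_z_90, np.pi / 2, 0.0)
    assert np.isclose(beta, np.pi / 2)
    assert np.isclose(alpha, np.pi / 2)
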
def spherical_basis_vector_rotation_matrix(
    theta, phi, rot_matrix, beta=None, alpha=None
):
    """
    Get the rotation matrix to take vectors in the theta/phi basis to a new reference frame.

    Given a position (`theta`, `phi`) in “standard mathematical” coordinates
    (0 < `theta` < pi, 0 < `phi` < 2 pi) which will typically be an ICRS RA/Dec
    coordinate appropriately converted, and the point to which it is transformed
    in another standard mathematical coordinate system (`beta`, `alpha`), which
    will typically be local telescope Alt/Az appropriately converted, and a
    3 x 3 rotation matrix `rot_matrix` which connects those two points, calculate
    the rotation matrix which rotates the basis vectors associated with
    (`theta`, `phi`) to those associated with (`beta`, `alpha`).

    Parameters
    ----------
    theta, phi : float
        The co-latitude and azimuth coordinates, respectively, for a point
        on the sphere, in radians. Azimuth is defined with respect to the
        x axis, co-latitude is the angle with the positive z axis.
    rot_matrix : array-like of float
        Rotation matrix that takes 3-vectors from (theta, phi) to (beta, alpha)
    beta, alpha : float, optional
        The theta, phi coordinates for the point on the sphere (using normal
        mathematical conventions) in the rotated frame. If either is not
        provided, they are calculated using `rotate_points_3d`. Note these
        may not be as exact as values calculated from astropy.

    Returns
    -------
    array of float
        2 x 2 rotation matrix that takes vectors in the theta/phi basis to
        the beta/alpha basis.

    """
    if alpha is None or beta is None:
        beta, alpha = rotate_points_3d(rot_matrix, theta, phi)

    th = theta_hat(theta, phi)
    ph = phi_hat(theta, phi)

    bh = np.einsum("ab...,b...->a...", rot_matrix.T, theta_hat(beta, alpha))

    cosX = np.einsum("a...,a...", bh, th)
    sinX = np.einsum("a...,a...", bh, ph)

    return np.array([[cosX, sinX], [-sinX, cosX]])

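# Illustrative sketch (hypothetical values, not part of the original module):
# a rotation purely about the z axis leaves the theta/phi basis vectors
# unchanged, so the returned 2 x 2 matrix should be the identity.
def _example_basis_rotation_matrix():
    rot_z_90 = np.array([[0.0, -1.0, 0.0], [1.0, 0.0, 0.0], [0.0, 0.0, 1.0]])
    rot_2d = spherical_basis_vector_rotation_matrix(np.pi / 2, 0.0, rot_z_90)
    assert np.allclose(rot_2d, np.eye(2))
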
def axis_angle_rotation_matrix(axis, angle):
    """
    Get the rotation matrix using Rodrigues' rotation matrix formula.

    Parameters
    ----------
    axis : array-like of float
        3 element unit vector specifying the axis to rotate around.
    angle : float
        angle to rotate by in radians

    Returns
    -------
    array
        3x3 rotation matrix to rotate vectors by `angle` around `axis`.

    """
    axis = np.asarray(axis)
    if axis.shape != (3,):
        raise ValueError("axis must be a length 3 vector")
    if not is_unit_vector(axis):
        raise ValueError("axis must be a unit vector")

    K_matrix = np.array(
        [[0.0, -axis[2], axis[1]], [axis[2], 0.0, -axis[0]], [-axis[1], axis[0], 0.0]]
    )
    I_matrix = np.identity(3)

    rot_matrix = (
        I_matrix
        + np.sin(angle) * K_matrix
        + (1.0 - np.cos(angle)) * np.dot(K_matrix, K_matrix)
    )

    return rot_matrix

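# Illustrative sketch (hypothetical values, not part of the original module):
# a 90 degree rotation about the y axis should carry the z unit vector onto
# the x unit vector.
def _example_axis_angle_rotation():
    rot = axis_angle_rotation_matrix(np.array([0.0, 1.0, 0.0]), np.pi / 2)
    assert np.allclose(rot @ np.array([0.0, 0.0, 1.0]), np.array([1.0, 0.0, 0.0]))
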
def is_orthogonal(matrix, tol=1e-15):
    """
    Test for matrix orthogonality.

    Parameters
    ----------
    matrix : array-like of float
        square matrix to test

    Returns
    -------
    bool
        True if `matrix` is orthogonal, False otherwise.

    """
    matrix = np.asarray(matrix)
    # Compare against an identity matching the input size so any square matrix works.
    return np.allclose(np.matmul(matrix, matrix.T), np.eye(matrix.shape[0]), atol=tol)

def is_unit_vector(vec, tol=1e-15):
    """
    Test for unit vectors.

    Parameters
    ----------
    vec : array-like of float
        vector to test

    Returns
    -------
    bool
        True if `vec` is a unit vector, False otherwise.

    """
    return np.allclose(np.dot(vec, vec), 1, atol=tol)

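# Illustrative sketch (obvious cases, not part of the original module): quick
# checks of the two validity helpers above.
def _example_validity_checks():
    assert is_orthogonal(np.identity(3))
    assert is_unit_vector(np.array([1.0, 0.0, 0.0]))
    assert not is_unit_vector(np.array([1.0, 1.0, 0.0]))
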
def vecs2rot(r1=None, r2=None, theta1=None, phi1=None, theta2=None, phi2=None):
    """
    Get the rotation matrix that connects two points or unit vectors on the sphere.

    Parameters
    ----------
    r1, r2 : array-like of float, optional
        length 3 unit vectors
    theta1, phi1, theta2, phi2 : float, optional
        The co-latitude and azimuth coordinates, respectively, for a point
        on the sphere, in radians. Azimuth is defined with respect to the
        x axis, co-latitude is the angle with the positive z axis.
        Ignored if r1 and r2 are supplied.

    Returns
    -------
    array
        3x3 rotation matrix that rotates the first point or vector into the other.

    """
    if r1 is None or r2 is None:
        if theta1 is None or phi1 is None or theta2 is None or phi2 is None:
            raise ValueError(
                "Either r1 and r2 must be supplied or all of "
                "theta1, phi1, theta2 and phi2 must be supplied."
            )
        r1 = r_hat(theta1, phi1)
        r2 = r_hat(theta2, phi2)
        assert is_unit_vector(r1), "r1 is not a unit vector: " + str(r1)
        assert is_unit_vector(r2), "r2 is not a unit vector: " + str(r2)
    else:
        r1 = np.array(r1)
        r2 = np.array(r2)
        if r1.shape != (3,) or r2.shape != (3,):
            raise ValueError("r1 and r2 must be length 3 vectors")
        if not is_unit_vector(r1) or not is_unit_vector(r2):
            raise ValueError("r1 and r2 must be unit vectors")

    norm = np.cross(r1, r2)

    # Note that Psi is between 0 and pi
    sinPsi = np.sqrt(np.dot(norm, norm))
    n_hat = norm / sinPsi  # Trouble lurks if Psi = 0.
    cosPsi = np.dot(r1, r2)
    Psi = np.arctan2(sinPsi, cosPsi)
    rotation = axis_angle_rotation_matrix(n_hat, Psi)

    assert is_unit_vector(n_hat), "n_hat is not a unit vector: " + str(n_hat)
    assert is_orthogonal(rotation), "rotation matrix is not orthogonal: " + str(
        rotation
    )

    return rotation

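# Illustrative sketch (arbitrary sample angles, not part of the original module):
# the matrix returned by vecs2rot should carry the first unit vector onto the
# second.
def _example_vecs2rot():
    r1 = r_hat(0.3, 1.0)
    r2 = r_hat(1.1, 2.5)
    rotation = vecs2rot(r1=r1, r2=r2)
    assert np.allclose(rotation @ r1, r2)
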
Parameters ---------- theta, phi : float The co-latitude and", "of p_hat p_hat = np.einsum(\"ab...,b...->a...\", rot_matrix, q_hat) # Should write a function to", "0: alpha += 2.0 * np.pi return (beta, alpha) def spherical_basis_vector_rotation_matrix( theta, phi,", "theta2=None, phi2=None): \"\"\" Get the rotation matrix that connects two points or unit", "Licensed under the 3-clause BSD License \"\"\"Methods for doing spherical coordinate transformations on", "be length 3 vector\") if not is_unit_vector(axis): raise ValueError(\"axis must be a unit", "vec), 1, atol=tol) def vecs2rot(r1=None, r2=None, theta1=None, phi1=None, theta2=None, phi2=None): \"\"\" Get the", "* np.sin(theta) q_hat_3 = np.cos(theta) q_hat = np.stack((q_hat_1, q_hat_2, q_hat_3)) # Should test", ": array-like of float Rotation matrix that takes 3-vectors from (theta, phi) to", "raise ValueError(\"axis must be a unit vector\") K_matrix = np.array( [[0.0, -axis[2], axis[1]],", "\" + str(r1) assert is_unit_vector(r2), \"r2 is not a unit vector: \" +", "on the sphere, in radians. Azimuth is defined with respect to the x", "theta2 and phi2 must be supplied.\" ) r1 = r_hat(theta1, phi1) r2 =", "between 0 and pi sinPsi = np.sqrt(np.dot(norm, norm)) n_hat = norm / sinPsi", "the positive z axis. Returns ------- array of float Array of theta hat", "a 3 x 3 rotation matrix `rot_matrix` which connects those two points, calculate", "or beta is None: beta, alpha = rotate_points_3d(rot_matrix, theta, phi) th = theta_hat(theta,", "unit vector, False otherwise. \"\"\" return np.allclose(np.dot(vec, vec), 1, atol=tol) def vecs2rot(r1=None, r2=None,", "the point specified by (theta,phi) and R is the 3D rotation matrix that", "r1 is None or r2 is None: if theta1 is None or phi1", "length 3 vectors\") if not is_unit_vector(r1) or not is_unit_vector(r2): raise ValueError(\"r1 and r2", "phi1, theta2, phi2 : float, optional The co-latitude and azimuth coordinates, respectively, for", "vector specifying the axis to rotate around. angle : float angle to rotate", "rhy = np.sin(phi) * np.sin(theta) rhz = np.cos(theta) return np.stack((rhx, rhy, rhz)) def", "n_hat = norm / sinPsi # Trouble lurks if Psi = 0. cosPsi", "coordinate system (`beta`, `alpha`), which will typically be local telescope Alt/Az appropriately converted,", "of float 3 element unit vector specifying the axis to rotate around. angle", "if `vec` is a unit vector, False otherwise. \"\"\" return np.allclose(np.dot(vec, vec), 1,", "phi.shape: raise ValueError(\"theta and phi must have the same shape\") rhx = np.cos(phi)", "Given a position (`theta`, `phi`) in “standard mathematical” coordinates (0 < `theta` <", "\"r2 is not a unit vector: \" + str(r2) else: r1 = np.array(r1)", "vectors on the sphere. Parameters ---------- r1, r2 : array-like of float, optional", "float 3 element unit vector specifying the axis to rotate around. angle :", "Get the phi hat unit vectors in cartesian coordinates for points on a", "q is the 3D position vector of the point specified by (theta,phi) and", "beta, alpha = rotate_points_3d(rot_matrix, theta, phi) th = theta_hat(theta, phi) ph = phi_hat(theta,", "lurks if Psi = 0. cosPsi = np.dot(r1, r2) Psi = np.arctan2(sinPsi, cosPsi)", "= np.cos(theta) return np.stack((rhx, rhy, rhz)) def theta_hat(theta, phi): \"\"\" Get the theta", "unit vectors in cartesian coordinates for points on a sphere. 
Parameters ---------- theta,", "and r2 must be supplied or all of \" \"theta1, phi1, theta2 and", "of float Array of phi hat vectors, shape (3, Npoints) \"\"\" theta =", "np.einsum(\"ab...,b...->a...\", rot_matrix, q_hat) # Should write a function to do this as well,", "phi must have the same shape\") thx = np.cos(phi) * np.cos(theta) thy =", "doing spherical coordinate transformations on vectors.\"\"\" import numpy as np def r_hat(theta, phi):", "Software Group # Licensed under the 3-clause BSD License \"\"\"Methods for doing spherical", "relates two coordinate charts. The accuracy of this method may not be good", "= np.sin(phi) * np.sin(theta) rhz = np.cos(theta) return np.stack((rhx, rhy, rhz)) def theta_hat(theta,", "np.cos(phi) * np.cos(theta) thy = np.sin(phi) * np.cos(theta) thz = -np.sin(theta) return np.stack((thx,", "rot_matrix : array-like of float rotation matrix to use theta, phi : float", "theta, phi coordinates for the point on the sphere (using normal mathematical conventions)", "3D rotation matrix that relates two coordinate charts. The accuracy of this method", "R is the 3D rotation matrix that relates two coordinate charts. The accuracy", "1.0 beta = np.arccos(p_hat[2]) alpha = np.arctan2(p_hat[1], p_hat[0]) if alpha < 0: alpha", "rotates the basis vectors associated with (`theta`, `phi`) to those associated with (`beta`,", ") r1 = r_hat(theta1, phi1) r2 = r_hat(theta2, phi2) assert is_unit_vector(r1), \"r1 is", "False otherwise. \"\"\" return np.allclose(np.dot(vec, vec), 1, atol=tol) def vecs2rot(r1=None, r2=None, theta1=None, phi1=None,", "i.e., pull back angles from # a vector if np.isclose(p_hat[2], 1.0, rtol=0.0, atol=1e-12):", "I_matrix + np.sin(angle) * K_matrix + (1.0 - np.cos(angle)) * np.dot(K_matrix, K_matrix) )", "np.sin(phi) * np.sin(theta) rhz = np.cos(theta) return np.stack((rhx, rhy, rhz)) def theta_hat(theta, phi):", "np.stack((q_hat_1, q_hat_2, q_hat_3)) # Should test for shape of p_hat p_hat = np.einsum(\"ab...,b...->a...\",", "BSD License \"\"\"Methods for doing spherical coordinate transformations on vectors.\"\"\" import numpy as", "r2 = np.array(r2) if r1.shape != (3,) or r2.shape != (3,): raise ValueError(\"r1", "which will typically be an ICRS RA/Dec coordinate appropriately converted, and the point" ]
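# --- Illustrative usage sketch (not part of the module above) ---
# A quick consistency check of vecs2rot, rotate_points_3d and
# spherical_basis_vector_rotation_matrix, assuming the module above is saved as
# spherical_coords_transforms.py (the file name is an assumption made for this
# example). It builds the rotation carrying one point on the sphere to another,
# confirms the rotated r_hat lands on the target direction, and checks that the
# 2x2 basis rotation is orthogonal.
import numpy as np

from spherical_coords_transforms import (
    r_hat,
    rotate_points_3d,
    spherical_basis_vector_rotation_matrix,
    vecs2rot,
)

theta1, phi1 = np.pi / 3, np.pi / 4  # starting point (co-latitude, azimuth)
theta2, phi2 = 2 * np.pi / 5, 1.2    # target point

# Rotation matrix that carries (theta1, phi1) to (theta2, phi2).
rot = vecs2rot(theta1=theta1, phi1=phi1, theta2=theta2, phi2=phi2)
assert np.allclose(rot @ r_hat(theta1, phi1), r_hat(theta2, phi2))

# rotate_points_3d should recover (theta2, phi2) from the same rotation.
beta, alpha = rotate_points_3d(rot, theta1, phi1)
assert np.isclose(beta, theta2) and np.isclose(alpha, phi2)

# The 2x2 rotation between the two spherical basis sets should be orthogonal.
basis_rot = spherical_basis_vector_rotation_matrix(theta1, phi1, rot)
assert np.allclose(basis_rot @ basis_rot.T, np.eye(2))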
import socket
import threading

import spotipy
from spotipy.oauth2 import SpotifyOAuth

HEADER = 64
PORT = 5050
SERVER = socket.gethostbyname(socket.gethostname())
ADDR = (SERVER, PORT)
FORMAT = 'utf-8'
DISCONNECT_MESSAGE = "!DISCONNECT"

server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(ADDR)


def init_auth_manager():
    with open('token-data_server.txt', 'r') as file:
        cred_data = file.readlines()
    scope = 'user-library-read user-read-playback-state user-modify-playback-state user-read-currently-playing'
    auth_manager = SpotifyOAuth(
        client_id=cred_data[0].strip(),
        client_secret=cred_data[1].strip(),
        redirect_uri=cred_data[2].strip(),
        scope=scope)
    return auth_manager


"""
Asks the user which playback device they'd like and returns the device id.
"""
def select_device(avail_devices):
    device_names = list(avail_devices.keys())
    if len(device_names) == 0:
        return
    user_input = -1
    while user_input - 1 not in range(len(device_names)):
        try:
            print()
            for i in range(len(device_names)):
                print(f"({i+1}) {device_names[i]}")
            print()
            print("Enter the number that corresponds with your player.")
            user_input = int(input("> "))
        except ValueError:
            print("[ERROR] Please enter a valid number.")
    return avail_devices[device_names[user_input - 1]]


"""
Calls the API to grab the available devices the user can interact with.
"""
def get_avail_devices(sp):
    avail_devices = dict()
    results = sp.devices()
    # print(len(results['devices']))
    if len(results['devices']) != 0:
        for i in range(len(results['devices'])):
            avail_devices[results['devices'][i]['name']] = results['devices'][i]['id']
    else:
        print("[ERROR] There are no available devices.")
    return avail_devices


"""
Plays a provided track on a provided device.
"""
def play_track(sp, device_id, track_id):
    uris_list = []
    uris_list.append(track_id)
    sp.start_playback(device_id=device_id, uris=uris_list)


def handle_client(conn, addr, sp, device_id):
    print(f"[NEW CONNECTION] {addr} connected.")
    # conn.send("[CONNECTED] You connected to the host".encode(FORMAT))
    connected = True
    while connected:
        msg_length = conn.recv(HEADER).decode(FORMAT)
        if msg_length:
            msg_length = int(msg_length)
            msg = conn.recv(msg_length).decode(FORMAT)
            if msg == DISCONNECT_MESSAGE:
                connected = False
            print(f"[{addr}] {msg}")
            match msg:
                case "playing":
                    track_info = sp.currently_playing()
                    track_name = track_info['item']['name']
                    track_artist = track_info['item']['album']['artists'][0]['name']
                    track_album = track_info['item']['album']['name']
                    conn.send(f"Name: {track_name} | Artist: {track_artist} | Album: {track_album}".encode(FORMAT))
            if ("https://open.spotify.com/track/") in msg:
                play_track(sp, device_id, msg)
                track_info = sp.currently_playing()
                track_name = track_info['item']['name']
                track_artist = track_info['item']['album']['artists'][0]['name']
                conn.send(f"[ADDED] ({track_name} by {track_artist}) added to queue.".encode(FORMAT))
    conn.close()


def start():
    server.listen()
    print(f"[LISTENING] Server is listening on {SERVER}")
    # Placed API build inside of start function for organization
    auth_manager = init_auth_manager()
    sp = spotipy.Spotify(auth_manager=auth_manager)
    # Host must select the device player when initializing the server.
    avail_devices = get_avail_devices(sp)
    device_id = select_device(avail_devices)
    while True:
        if device_id is None:
            break
        conn, addr = server.accept()
        thread = threading.Thread(target=handle_client, args=(conn, addr, sp, device_id))
        thread.start()
        print(f"[ACTIVE CONNECTIONS] {threading.active_count() - 1}")
    print("[CLOSING] server is stopping...")


print("[STARTING] server is starting...")
start()
[ "HTTPStatus import requests from cleo import Command from clikit.api.io import flags from .constants", "to see if it's available?} \"\"\" def handle(self): name = self.argument(\"name\") url =", "if it's available?} \"\"\" def handle(self): name = self.argument(\"name\") url = f\"{NPM_BASE_URL}{name}\" with", "AVAILABLE_MSG, HTTP_STATUS_CODE_MSG, NOT_AVAILABLE_MSG, NPM_BASE_URL, ) class NpmCommand(Command): \"\"\" Check the availability of a", "from http import HTTPStatus import requests from cleo import Command from clikit.api.io import", "package name in npm npm {name : What package name do you want", "npm npm {name : What package name do you want to see if", "= f\"{NPM_BASE_URL}{name}\" with requests.Session() as s: r = s.get(url) status_code = r.status_code updated_url", "Command from clikit.api.io import flags from .constants import ( AVAILABLE_MSG, HTTP_STATUS_CODE_MSG, NOT_AVAILABLE_MSG, NPM_BASE_URL,", "self.line( HTTP_STATUS_CODE_MSG.format(code=status_code, desc=status_code_description), verbosity=flags.VERBOSE, ) is_available = status_code == 404 if is_available: self.line(AVAILABLE_MSG.format(name=name))", "from .constants import ( AVAILABLE_MSG, HTTP_STATUS_CODE_MSG, NOT_AVAILABLE_MSG, NPM_BASE_URL, ) class NpmCommand(Command): \"\"\" Check", "HTTP_STATUS_CODE_MSG, NOT_AVAILABLE_MSG, NPM_BASE_URL, ) class NpmCommand(Command): \"\"\" Check the availability of a package", "import ( AVAILABLE_MSG, HTTP_STATUS_CODE_MSG, NOT_AVAILABLE_MSG, NPM_BASE_URL, ) class NpmCommand(Command): \"\"\" Check the availability", "HTTPStatus(status_code).phrase self.line( HTTP_STATUS_CODE_MSG.format(code=status_code, desc=status_code_description), verbosity=flags.VERBOSE, ) is_available = status_code == 404 if is_available:", "{name : What package name do you want to see if it's available?}", "name in npm npm {name : What package name do you want to", "see if it's available?} \"\"\" def handle(self): name = self.argument(\"name\") url = f\"{NPM_BASE_URL}{name}\"", "verbosity=flags.VERBOSE, ) is_available = status_code == 404 if is_available: self.line(AVAILABLE_MSG.format(name=name)) else: self.line(NOT_AVAILABLE_MSG.format(name=name, url=updated_url))", "\"\"\" Check the availability of a package name in npm npm {name :", "r.status_code updated_url = r.url status_code_description = HTTPStatus(status_code).phrase self.line( HTTP_STATUS_CODE_MSG.format(code=status_code, desc=status_code_description), verbosity=flags.VERBOSE, ) is_available", "class NpmCommand(Command): \"\"\" Check the availability of a package name in npm npm", "name = self.argument(\"name\") url = f\"{NPM_BASE_URL}{name}\" with requests.Session() as s: r = s.get(url)", "= self.argument(\"name\") url = f\"{NPM_BASE_URL}{name}\" with requests.Session() as s: r = s.get(url) status_code", ") class NpmCommand(Command): \"\"\" Check the availability of a package name in npm", "handle(self): name = self.argument(\"name\") url = f\"{NPM_BASE_URL}{name}\" with requests.Session() as s: r =", "url = f\"{NPM_BASE_URL}{name}\" with requests.Session() as s: r = s.get(url) status_code = r.status_code", "= r.status_code updated_url = r.url status_code_description = HTTPStatus(status_code).phrase self.line( HTTP_STATUS_CODE_MSG.format(code=status_code, desc=status_code_description), verbosity=flags.VERBOSE, )", "you want to see if it's available?} \"\"\" def handle(self): name = self.argument(\"name\")", "as s: r = s.get(url) status_code = r.status_code updated_url = r.url status_code_description =", "s.get(url) status_code = r.status_code 
updated_url = r.url status_code_description = HTTPStatus(status_code).phrase self.line( HTTP_STATUS_CODE_MSG.format(code=status_code, desc=status_code_description),", "want to see if it's available?} \"\"\" def handle(self): name = self.argument(\"name\") url", "status_code = r.status_code updated_url = r.url status_code_description = HTTPStatus(status_code).phrase self.line( HTTP_STATUS_CODE_MSG.format(code=status_code, desc=status_code_description), verbosity=flags.VERBOSE,", "s: r = s.get(url) status_code = r.status_code updated_url = r.url status_code_description = HTTPStatus(status_code).phrase", "http import HTTPStatus import requests from cleo import Command from clikit.api.io import flags", "npm {name : What package name do you want to see if it's", "( AVAILABLE_MSG, HTTP_STATUS_CODE_MSG, NOT_AVAILABLE_MSG, NPM_BASE_URL, ) class NpmCommand(Command): \"\"\" Check the availability of", "NpmCommand(Command): \"\"\" Check the availability of a package name in npm npm {name", "in npm npm {name : What package name do you want to see", "do you want to see if it's available?} \"\"\" def handle(self): name =", "cleo import Command from clikit.api.io import flags from .constants import ( AVAILABLE_MSG, HTTP_STATUS_CODE_MSG,", "import HTTPStatus import requests from cleo import Command from clikit.api.io import flags from", "self.argument(\"name\") url = f\"{NPM_BASE_URL}{name}\" with requests.Session() as s: r = s.get(url) status_code =", "availability of a package name in npm npm {name : What package name", "from clikit.api.io import flags from .constants import ( AVAILABLE_MSG, HTTP_STATUS_CODE_MSG, NOT_AVAILABLE_MSG, NPM_BASE_URL, )", "HTTP_STATUS_CODE_MSG.format(code=status_code, desc=status_code_description), verbosity=flags.VERBOSE, ) is_available = status_code == 404 if is_available: self.line(AVAILABLE_MSG.format(name=name)) else:", "NPM_BASE_URL, ) class NpmCommand(Command): \"\"\" Check the availability of a package name in", "clikit.api.io import flags from .constants import ( AVAILABLE_MSG, HTTP_STATUS_CODE_MSG, NOT_AVAILABLE_MSG, NPM_BASE_URL, ) class", "import Command from clikit.api.io import flags from .constants import ( AVAILABLE_MSG, HTTP_STATUS_CODE_MSG, NOT_AVAILABLE_MSG,", "requests.Session() as s: r = s.get(url) status_code = r.status_code updated_url = r.url status_code_description", "status_code_description = HTTPStatus(status_code).phrase self.line( HTTP_STATUS_CODE_MSG.format(code=status_code, desc=status_code_description), verbosity=flags.VERBOSE, ) is_available = status_code == 404", "flags from .constants import ( AVAILABLE_MSG, HTTP_STATUS_CODE_MSG, NOT_AVAILABLE_MSG, NPM_BASE_URL, ) class NpmCommand(Command): \"\"\"", "What package name do you want to see if it's available?} \"\"\" def", "f\"{NPM_BASE_URL}{name}\" with requests.Session() as s: r = s.get(url) status_code = r.status_code updated_url =", ": What package name do you want to see if it's available?} \"\"\"", "r.url status_code_description = HTTPStatus(status_code).phrase self.line( HTTP_STATUS_CODE_MSG.format(code=status_code, desc=status_code_description), verbosity=flags.VERBOSE, ) is_available = status_code ==", "desc=status_code_description), verbosity=flags.VERBOSE, ) is_available = status_code == 404 if is_available: self.line(AVAILABLE_MSG.format(name=name)) else: self.line(NOT_AVAILABLE_MSG.format(name=name,", "a package name in npm npm {name : What package name do you", "name do you want to see if it's available?} \"\"\" def handle(self): name", "def handle(self): name = 
self.argument(\"name\") url = f\"{NPM_BASE_URL}{name}\" with requests.Session() as s: r", "with requests.Session() as s: r = s.get(url) status_code = r.status_code updated_url = r.url", "available?} \"\"\" def handle(self): name = self.argument(\"name\") url = f\"{NPM_BASE_URL}{name}\" with requests.Session() as", "import requests from cleo import Command from clikit.api.io import flags from .constants import", "NOT_AVAILABLE_MSG, NPM_BASE_URL, ) class NpmCommand(Command): \"\"\" Check the availability of a package name", "= HTTPStatus(status_code).phrase self.line( HTTP_STATUS_CODE_MSG.format(code=status_code, desc=status_code_description), verbosity=flags.VERBOSE, ) is_available = status_code == 404 if", "it's available?} \"\"\" def handle(self): name = self.argument(\"name\") url = f\"{NPM_BASE_URL}{name}\" with requests.Session()", "import flags from .constants import ( AVAILABLE_MSG, HTTP_STATUS_CODE_MSG, NOT_AVAILABLE_MSG, NPM_BASE_URL, ) class NpmCommand(Command):", "Check the availability of a package name in npm npm {name : What", ".constants import ( AVAILABLE_MSG, HTTP_STATUS_CODE_MSG, NOT_AVAILABLE_MSG, NPM_BASE_URL, ) class NpmCommand(Command): \"\"\" Check the", "the availability of a package name in npm npm {name : What package", "= s.get(url) status_code = r.status_code updated_url = r.url status_code_description = HTTPStatus(status_code).phrase self.line( HTTP_STATUS_CODE_MSG.format(code=status_code,", "\"\"\" def handle(self): name = self.argument(\"name\") url = f\"{NPM_BASE_URL}{name}\" with requests.Session() as s:", "requests from cleo import Command from clikit.api.io import flags from .constants import (", "= r.url status_code_description = HTTPStatus(status_code).phrase self.line( HTTP_STATUS_CODE_MSG.format(code=status_code, desc=status_code_description), verbosity=flags.VERBOSE, ) is_available = status_code", "of a package name in npm npm {name : What package name do", "updated_url = r.url status_code_description = HTTPStatus(status_code).phrase self.line( HTTP_STATUS_CODE_MSG.format(code=status_code, desc=status_code_description), verbosity=flags.VERBOSE, ) is_available =", "from cleo import Command from clikit.api.io import flags from .constants import ( AVAILABLE_MSG,", "r = s.get(url) status_code = r.status_code updated_url = r.url status_code_description = HTTPStatus(status_code).phrase self.line(", "package name do you want to see if it's available?} \"\"\" def handle(self):" ]
[ "AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,", "To invoke a command on multiple EC2 instances with one rule, you can", "on resource-based policies. For EC2 instances, Amazon Kinesis streams, and AWS Step Functions", "of the PutEvents call is used. Source (string) --The source of the event.", "state machines, CloudWatch Events relies on IAM roles that you specify in the", "SOFTWARE. ''' def can_paginate(operation_name=None): \"\"\" Check if an operation can be paginated. :type", "use on the generated url. By default, the http method is whatever is", "event, resources associated with the event, and so on. (dict) --Represents an event", "Amazon Resource Names (ARNs). However, CloudWatch Events uses an exact match in event", "EC2 instances, Amazon Kinesis streams, and AWS Step Functions state machines, CloudWatch Events", "string :param Name: [REQUIRED] The name of the rule. :rtype: dict :return: {", "Be sure to use the correct ARN characters when creating event patterns so", "the specified targets from the specified rule. When the rule is triggered, those", ":rtype: dict :return: { 'FailedEntryCount': 123, 'FailedEntries': [ { 'TargetId': 'string', 'ErrorCode': 'string',", "this software and associated documentation files (the \"Software\"), to deal in the Software", "OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE", "L{botocore.paginate.Paginator} \"\"\" pass def get_waiter(): \"\"\" \"\"\" pass def list_rule_names_by_target(TargetArn=None, NextToken=None, Limit=None): \"\"\"", "[ { 'Key': 'string', 'Values': [ 'string', ] }, ] }, 'EcsParameters': {", "'string', 'RoleArn': 'string' }, ], 'NextToken': 'string' } \"\"\" pass def list_targets_by_rule(Rule=None, NextToken=None,", "EventPattern: string :param EventPattern: The event pattern. For more information, see Events and", "remove a target, when the associated rule triggers, removed targets might continue to", "CloudWatch Events needs the appropriate permissions. For AWS Lambda and Amazon SNS resources,", "see Authentication and Access Control in the Amazon CloudWatch Events User Guide .", "Name: string :param Name: [REQUIRED] The name of the rule. \"\"\" pass def", "for extracting part of the matched event when passing it to the target.", "Note that creating rules with built-in targets is supported only in the AWS", "multiple values. Key (string) -- [REQUIRED]Can be either tag: tag-key or InstanceIds .", "HttpMethod: The http method to use on the generated url. By default, the", "the Software without restriction, including without limitation the rights to use, copy, modify,", "DisableRule . When you create or update a rule, incoming events might not", "the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies", "is provided, the timestamp of the PutEvents call is used. Source (string) --The", "matching to new or updated rules. Please allow a short period of time", "API Documentation :example: response = client.put_targets( Rule='string', Targets=[ { 'Id': 'string', 'Arn': 'string',", "Management Console. For some target types, PutTargets provides target-specific parameters. If the target", "[REQUIRED] The name of the rule. :type Targets: list :param Targets: [REQUIRED] The", "is triggered, those targets are no longer be invoked. 
When you remove a", "without restriction, including without limitation the rights to use, copy, modify, merge, publish,", "ExpiresIn: int :param ExpiresIn: The number of seconds the presigned url is valid", "(string) -- [REQUIRED]Can be either tag: tag-key or InstanceIds . Values (list) --", "against the event pattern. :rtype: dict :return: { 'Result': True|False } \"\"\" pass", "a rule and the associated rule triggers soon after, new or updated targets", "provides the ID of the failed target and the error code. See also:", "must use JSON dot notation, not bracket notation. For more information, see The", ":returns: Rule (string) -- [REQUIRED] The name of the rule. Targets (list) --", "IAM role for each target. Input (string) --Valid JSON text passed to the", "(ARN) of the target resource. :type NextToken: string :param NextToken: The token returned", "made at the same time. If that happens, FailedEntryCount is non-zero in the", "create_foo operation can be paginated, you can use the call client.get_paginator('create_foo'). \"\"\" pass", "an operation can be paginated. :type operation_name: string :param operation_name: The operation name.", "Enables the specified rule. If the rule does not exist, the operation fails.", "tasks, see Task Definitions in the Amazon EC2 Container Service Developer Guide .", "Amazon EC2 Run Command. RunCommandTargets (list) -- [REQUIRED]Currently, we support including only one", "you want to match. See also: AWS API Documentation :example: response = client.put_rule(", "API Documentation :example: response = client.remove_targets( Rule='string', Ids=[ 'string', ] ) :type Rule:", "ScheduleExpression='string', EventPattern='string', State='ENABLED'|'DISABLED', Description='string', RoleArn='string' ) :type Name: string :param Name: [REQUIRED] The", "sublicense, and/or sell copies of the Software, and to permit persons to whom", "HttpMethod=None): \"\"\" Generate a presigned url given a client, its method, and arguments", "scheduling expression. For example, 'cron(0 20 * * ? *)', 'rate(5 minutes)'. :type", "description of the rule. :type RoleArn: string :param RoleArn: The Amazon Resource Name", "'string', ] ) :type Rule: string :param Rule: [REQUIRED] The name of the", "the error code. See also: AWS API Documentation :example: response = client.remove_targets( Rule='string',", "targets if they are already associated with the rule. Targets are the resources", "if the event target is an Amazon ECS task. For more information about", "so on. (dict) --Represents an event to be submitted. Time (datetime) --The timestamp", "By default it expires in an hour (3600 seconds) :type HttpMethod: string :param", "the rule is enabled or disabled. :type Description: string :param Description: A description", "the eventId as the partition key. PartitionKeyPath (string) -- [REQUIRED]The JSON path to", "'InputTransformer': { 'InputPathsMap': { 'string': 'string' }, 'InputTemplate': 'string' }, 'KinesisParameters': { 'PartitionKeyPath':", "notice and this permission notice shall be included in all copies or substantial", "next set of results. :type Limit: integer :param Limit: The maximum number of", "Developer Guide . RunCommandParameters (dict) --Parameters used when you are using the rule", ":type ExpiresIn: int :param ExpiresIn: The number of seconds the presigned url is", "rule name. :type NextToken: string :param NextToken: The token returned by a previous", "the Amazon Kinesis Streams Developer Guide . RunCommandParameters (dict) --Parameters used when you", "with the rule. 
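# --- Illustrative boto3 sketch (not part of the original stub) ---
# RemoveTargets can partially fail, as described above; this minimal example checks
# FailedEntryCount/FailedEntries. Assumes AWS credentials and region are configured;
# the rule name and target ID are placeholders.
import boto3

events = boto3.client('events')

resp = events.remove_targets(
    Rule='my-rule',            # placeholder rule name
    Ids=['my-target-id'],      # placeholder target ID
)
if resp['FailedEntryCount']:
    for entry in resp['FailedEntries']:
        # Each failed entry reports the target ID and the error code/message.
        print(entry['TargetId'], entry['ErrorCode'], entry['ErrorMessage'])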
:rtype: dict :return: { 'RuleArn': 'string' } \"\"\" pass def", "the target. Arn (string) -- [REQUIRED]The Amazon Resource Name (ARN) of the target.", "bracket notation. (string) -- (string) -- InputTemplate (string) -- [REQUIRED]Input template where you", "whether the rule is enabled or disabled. :type Description: string :param Description: A", "information, see Authentication and Access Control in the Amazon CloudWatch Events User Guide", "itself is passed to the target. You must use JSON dot notation, not", "is an Amazon ECS task. For more information about Amazon ECS tasks, see", "on the given schedule. A rule can have both an EventPattern and a", ":example: response = client.list_targets_by_rule( Rule='string', NextToken='string', Limit=123 ) :type Rule: string :param Rule:", "name of the rule. Targets (list) -- [REQUIRED] The targets to update or", "url given a client, its method, and arguments :type ClientMethod: string :param ClientMethod:", "TargetArn: string :param TargetArn: [REQUIRED] The Amazon Resource Name (ARN) of the target", "correct ARN characters when creating event patterns so that they match the ARN", "string :param ClientMethod: The client method to presign for :type Params: dict :param", "string :param EventPattern: [REQUIRED] The event pattern. For more information, see Events and", "the rule. (dict) --Targets are the resources to be invoked when a rule", "of the state. You can disable a rule using DisableRule . When you", "syntax in the event you want to match. See also: AWS API Documentation", "rules for the specified target. You can see which of the rules in", "}, 'InputTemplate': 'string' }, 'KinesisParameters': { 'PartitionKeyPath': 'string' }, 'RunCommandParameters': { 'RunCommandTargets': [", ":type Entries: list :param Entries: [REQUIRED] The entry that defines an event in", "], 'NextToken': 'string' } \"\"\" pass def list_targets_by_rule(Rule=None, NextToken=None, Limit=None): \"\"\" Lists the", "RunCommandParameters (dict) --Parameters used when you are using the rule to invoke Amazon", "use the correct ARN characters when creating event patterns so that they match", "the resources to be invoked when a rule is triggered. Target types include", "ExpiresIn: The number of seconds the presigned url is valid for. By default", "can be paginated. :type operation_name: string :param operation_name: The operation name. This is", "won't self-trigger if it has a schedule expression. When you disable a rule,", "no timestamp is provided, the timestamp of the PutEvents call is used. Source", "'cron(0 20 * * ? *)', 'rate(5 minutes)'. :type EventPattern: string :param EventPattern:", "in the Amazon CloudWatch Events User Guide . :type State: string :param State:", "ScheduleExpression. Rules with EventPatterns are triggered when a matching event is observed. Rules", "'ScheduleExpression': 'string', 'RoleArn': 'string' }, ], 'NextToken': 'string' } \"\"\" pass def list_targets_by_rule(Rule=None,", "the targets if they are already associated with the rule. Targets are the", "target. In this case, nothing from the event itself is passed to the", "argument in PutTarget . For more information, see Authentication and Access Control in", "block, which specifies either an array of InstanceIds or a tag. (dict) --Information", "Amazon CloudWatch Events User Guide . 
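# --- Illustrative boto3 sketch (not part of the original stub) ---
# Creating a scheduled rule with PutRule; the call returns the rule's ARN as shown
# in the :return: above. Assumes configured credentials/region; the name is a placeholder.
import boto3

events = boto3.client('events')

resp = events.put_rule(
    Name='nightly-report',                 # placeholder rule name
    ScheduleExpression='rate(5 minutes)',
    State='ENABLED',
    Description='Example scheduled rule',
)
print(resp['RuleArn'])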
When you specify Input , InputPath ,", "either list all the rules or you can provide a prefix to match", "rules in Amazon CloudWatch Events can invoke a specific target in your account.", "to deal in the Software without restriction, including without limitation the rights to", "event and then use that data to send customized input to the target.", "timestamp of the PutEvents call is used. Source (string) --The source of the", "time for changes to take effect. A rule must contain at least an", "Command. RunCommandTargets (list) -- [REQUIRED]Currently, we support including only one RunCommandTarget block, which", "Limit: The maximum number of results to return. :rtype: dict :return: { 'Targets':", "also: AWS API Documentation :example: response = client.enable_rule( Name='string' ) :type Name: string", "the partition key. PartitionKeyPath (string) -- [REQUIRED]The JSON path to be extracted from", ":param Event: [REQUIRED] The event, in JSON format, to test against the event", "the associated rule triggers, removed targets might continue to be invoked. Please allow", "See also: AWS API Documentation :example: response = client.delete_rule( Name='string' ) :type Name:", "rule, incoming events might continue to match to the disabled rule. Please allow", "Software without restriction, including without limitation the rights to use, copy, modify, merge,", "event is observed. Rules with ScheduleExpressions self-trigger based on the given schedule. A", "the event primarily concerns. Any number, including zero, may be present. (string) --", "Disables the specified rule. A disabled rule won't match any events, and won't", "RoleArn: The Amazon Resource Name (ARN) of the IAM role associated with the", "targets to remove from the rule. (string) -- :rtype: dict :return: { 'FailedEntryCount':", "fail if too many requests are made at the same time. If that", "as the same character in Amazon Resource Names (ARNs). However, CloudWatch Events uses", "The maximum number of results to return. :rtype: dict :return: { 'Rules': [", "you add targets to a rule and the associated rule triggers soon after,", "those targets are no longer be invoked. When you remove a target, when", "in your system. You can specify several parameters for the entry such as", "from the rule. (string) -- :rtype: dict :return: { 'FailedEntryCount': 123, 'FailedEntries': [", "specified event pattern matches the provided event. Most services in AWS treat :", "IDs of the targets to remove from the rule. (string) -- :rtype: dict", "format, to test against the event pattern. :rtype: dict :return: { 'Result': True|False", "USE OR OTHER DEALINGS IN THE SOFTWARE. ''' def can_paginate(operation_name=None): \"\"\" Check if", "FailedEntryCount is non-zero in the response and each entry in FailedEntries provides the", "events might continue to match to the disabled rule. Please allow a short", "url is valid for. By default it expires in an hour (3600 seconds)", "rule. Targets are the resources that are invoked when a rule is triggered.", "targets include EC2 instances, AWS Lambda functions, Amazon Kinesis streams, Amazon ECS tasks,", "Resource Name (ARN), which the event primarily concerns. 
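# --- Illustrative boto3 sketch (not part of the original stub) ---
# Two targets on one rule: one with a static Input payload, one with an InputPath
# written in JSON dot notation, as the paragraph above requires. Lambda/SNS targets
# rely on resource-based policies, so no RoleArn is passed here. ARNs and names are
# placeholders; assumes configured credentials/region.
import json
import boto3

events = boto3.client('events')

events.put_targets(
    Rule='my-rule',                        # placeholder rule name
    Targets=[
        {
            'Id': 'static-input-target',
            'Arn': 'arn:aws:lambda:us-east-1:123456789012:function:my-fn',  # placeholder
            # Input replaces the matched event entirely with this JSON text.
            'Input': json.dumps({'action': 'report'}),
        },
        {
            'Id': 'input-path-target',
            'Arn': 'arn:aws:sns:us-east-1:123456789012:my-topic',           # placeholder
            # InputPath forwards only the selected part of the event.
            'InputPath': '$.detail',
        },
    ],
)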
Any number, including zero, may", "pass def put_rule(Name=None, ScheduleExpression=None, EventPattern=None, State=None, Description=None, RoleArn=None): \"\"\" Creates or updates the", ":type ClientMethod: string :param ClientMethod: The client method to presign for :type Params:", "}, ] ) :type Entries: list :param Entries: [REQUIRED] The entry that defines", "are creating or updating. :type ScheduleExpression: string :param ScheduleExpression: The scheduling expression. For", "\"\"\" pass def put_rule(Name=None, ScheduleExpression=None, EventPattern=None, State=None, Description=None, RoleArn=None): \"\"\" Creates or updates", "{ 'string': 'string' }, 'InputTemplate': 'string' }, 'KinesisParameters': { 'PartitionKeyPath': 'string' }, 'RunCommandParameters':", "AWS treat : or / as the same character in Amazon Resource Names", "default it expires in an hour (3600 seconds) :type HttpMethod: string :param HttpMethod:", ":type TargetArn: string :param TargetArn: [REQUIRED] The Amazon Resource Name (ARN) of the", "more information, see Amazon Kinesis Streams Key Concepts in the Amazon Kinesis Streams", "patterns so that they match the ARN syntax in the event you want", "specified rule, or updates the targets if they are already associated with the", "be extracted from the event and used as the partition key. For more", "{ 'RunCommandTargets': [ { 'Key': 'string', 'Values': [ 'string', ] }, ] },", "and type of the event, resources associated with the event, and so on.", "datetime(2015, 1, 1), 'Source': 'string', 'Resources': [ 'string', ], 'DetailType': 'string', 'Detail': 'string'", "free of charge, to any person obtaining a copy of this software and", "and this permission notice shall be included in all copies or substantial portions", "= client.disable_rule( Name='string' ) :type Name: string :param Name: [REQUIRED] The name of", "pass def list_rules(NamePrefix=None, NextToken=None, Limit=None): \"\"\" Lists your Amazon CloudWatch Events rules. You", "to provide custom input to a target based on certain event data. You", "bracket notation. When you add targets to a rule and the associated rule", "You can see which of the rules in Amazon CloudWatch Events can invoke", "RunCommandTarget block, which specifies either an array of InstanceIds or a tag. (dict)", "on the TaskDefinition . The default is one. \"\"\" pass def remove_targets(Rule=None, Ids=None):", "the specified event pattern matches the provided event. Most services in AWS treat", "Notation (JSON) Data Interchange Format . InputPath (string) --The value of the JSONPath", "rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of", "they can be matched to rules. See also: AWS API Documentation :example: response", "not bracket notation. For more information, see The JavaScript Object Notation (JSON) Data", "triggers, removed targets might continue to be invoked. Please allow a short period", "SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ''' def can_paginate(operation_name=None):", "system. You can specify several parameters for the entry such as the source", "rules with built-in targets is supported only in the AWS Management Console. For", "and built-in targets. Id (string) -- [REQUIRED]The ID of the target. Arn (string)", "continue to match to the disabled rule. Please allow a short period of", "immediately start matching to a newly enabled rule. Please allow a short period", "EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES", "The event pattern. 
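# --- Illustrative boto3 sketch (not part of the original stub) ---
# Emitting a custom event with PutEvents. Resources may list zero or more ARNs the
# event concerns, and Detail must be a JSON string. Assumes configured credentials/region;
# source, detail-type, and the ARN are placeholders.
import json
from datetime import datetime, timezone
import boto3

events = boto3.client('events')

resp = events.put_events(
    Entries=[
        {
            'Time': datetime.now(timezone.utc),
            'Source': 'com.example.app',                 # placeholder source
            'Resources': [
                'arn:aws:ec2:us-east-1:123456789012:instance/i-0123456789abcdef0',
            ],
            'DetailType': 'example.detail.type',
            'Detail': json.dumps({'status': 'started'}),  # must be a JSON string
        },
    ],
)
print(resp['FailedEntryCount'], [e.get('EventId') for e in resp['Entries']])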
For more information, see Events and Event Patterns in the", "state. You can disable a rule using DisableRule . When you create or", "a rule using DisableRule . When you create or update a rule, incoming", "an event to be submitted. Time (datetime) --The timestamp of the event, per", "timestamp of the event, per RFC3339 . If no timestamp is provided, the", "use JSON dot notation, not bracket notation. When you add targets to a", "ID of the target. Arn (string) -- [REQUIRED]The Amazon Resource Name (ARN) of", ":param Entries: [REQUIRED] The entry that defines an event in your system. You", "rule won't match any events, and won't self-trigger if it has a schedule", "target. RoleArn (string) --The Amazon Resource Name (ARN) of the IAM role to", "incoming events might not immediately start matching to new or updated rules. Please", "EcsParameters (dict) --Contains the Amazon ECS task definition and task count to be", "a rule, incoming events might continue to match to the deleted rule. Please", "notice shall be included in all copies or substantial portions of the Software.", "presigned url is valid for. By default it expires in an hour (3600", "(dict) --The custom parameter you can use to control shard assignment, when the", "rule, incoming events might continue to match to the deleted rule. Please allow", "AWS API Documentation :example: response = client.describe_rule( Name='string' ) :type Name: string :param", ":rtype: dict :return: { 'RuleArn': 'string' } \"\"\" pass def put_targets(Rule=None, Targets=None): \"\"\"", "appropriate permissions. For AWS Lambda and Amazon SNS resources, CloudWatch Events relies on", "returned by a previous call to retrieve the next set of results. :type", "target is an Amazon ECS cluster. TaskCount (integer) --The number of tasks to", "hour (3600 seconds) :type HttpMethod: string :param HttpMethod: The http method to use", "the associated rule triggers soon after, new or updated targets might not be", "but this key may specify multiple values. Key (string) -- [REQUIRED]Can be either", "they are already associated with the rule. Targets are the resources that are", "These are key-value pairs, where each value is a JSON path. You must", "The name of the rule. \"\"\" pass def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None): \"\"\"", "put_targets(Rule=None, Targets=None): \"\"\" Adds the specified targets to the specified rule, or updates", "rule. See also: AWS API Documentation :example: response = client.describe_rule( Name='string' ) :type", "ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION", "parameter, the default is to use the eventId as the partition key. PartitionKeyPath", "IDs. (string) -- EcsParameters (dict) --Contains the Amazon ECS task definition and task", "AWS API Documentation :example: response = client.remove_targets( Rule='string', Ids=[ 'string', ] ) :type", "updates the targets if they are already associated with the rule. Targets are", ":type Params: dict :param Params: The parameters normally passed to ClientMethod. :type ExpiresIn:", "method is whatever is used in the method's model. \"\"\" pass def get_paginator(operation_name=None):", "The IDs of the targets to remove from the rule. (string) -- :rtype:", "The client method to presign for :type Params: dict :param Params: The parameters", "use the RunCommandParameters field. To be able to make API calls against the", "least an EventPattern or ScheduleExpression. 
Rules with EventPatterns are triggered when a matching", "specified rule. Rules are enabled by default, or based on value of the", "Amazon CloudWatch Events can invoke a specific target in your account. See also:", "as the method name on the client. For example, if the method name", "data sent to the target. KinesisParameters (dict) --The custom parameter you can use", "submitted. Time (datetime) --The timestamp of the event, per RFC3339 . If no", "put_rule(Name=None, ScheduleExpression=None, EventPattern=None, State=None, Description=None, RoleArn=None): \"\"\" Creates or updates the specified rule.", "the target. You must use JSON dot notation, not bracket notation. For more", "event patterns and rules. Be sure to use the correct ARN characters when", "the Amazon ECS task definition and task count to be used, if the", ":example: response = client.describe_rule( Name='string' ) :type Name: string :param Name: [REQUIRED] The", "ECS tasks, AWS Step Functions state machines, Run Command, and built-in targets. Id", "this case, nothing from the event itself is passed to the target. You", "'string' }, 'KinesisParameters': { 'PartitionKeyPath': 'string' }, 'RunCommandParameters': { 'RunCommandTargets': [ { 'Key':", "Key is InstanceIds , Values is a list of Amazon EC2 instance IDs.", "or updated targets might not be immediately invoked. Please allow a short period", "which may also contain nested subobjects. No constraints are imposed on its contents.", "OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,", "or updates the specified rule. Rules are enabled by default, or based on", "well as on a schedule. Most services in AWS treat : or /", "publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons", "'EcsParameters': { 'TaskDefinitionArn': 'string', 'TaskCount': 123 } }, ], 'NextToken': 'string' } :returns:", "take effect. See also: AWS API Documentation :example: response = client.enable_rule( Name='string' )", "Console. For some target types, PutTargets provides target-specific parameters. If the target is", "the event. These are key-value pairs, where each value is a JSON path.", "--Valid JSON text passed to the target. In this case, nothing from the", "using DisableRule . When you create or update a rule, incoming events might", "the values of the keys from InputPathsMap to customize the data sent to", "string :param TargetArn: [REQUIRED] The Amazon Resource Name (ARN) of the target resource.", "targets, you can use a different IAM role for each target. Input (string)", "{ 'Targets': [ { 'Id': 'string', 'Arn': 'string', 'RoleArn': 'string', 'Input': 'string', 'InputPath':", "CloudWatch Events User Guide . :type State: string :param State: Indicates whether the", "\"\"\" Creates or updates the specified rule. Rules are enabled by default, or", "allow a short period of time for changes to take effect. See also:", "the presigned url is valid for. By default it expires in an hour", "and Amazon SNS resources, CloudWatch Events relies on resource-based policies. For EC2 instances,", "and a ScheduleExpression, in which case the rule triggers on matching events as", "against the resources that you own, Amazon CloudWatch Events needs the appropriate permissions.", "eventId as the partition key. PartitionKeyPath (string) -- [REQUIRED]The JSON path to be", ":rtype: dict :return: { 'FailedEntryCount': 123, 'Entries': [ { 'EventId': 'string', 'ErrorCode': 'string',", "to send customized input to the target. 
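# --- Illustrative boto3 sketch (not part of the original stub) ---
# An event-pattern rule, as opposed to the ScheduleExpression rule sketched earlier.
# The pattern below is a common EC2 state-change example; the rule name is a placeholder
# and credentials/region are assumed to be configured.
import json
import boto3

events = boto3.client('events')

pattern = {
    'source': ['aws.ec2'],
    'detail-type': ['EC2 Instance State-change Notification'],
    'detail': {'state': ['running']},
}
events.put_rule(
    Name='ec2-running-instances',          # placeholder rule name
    EventPattern=json.dumps(pattern),
    State='ENABLED',
)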
InputPathsMap (dict) --Map of JSON paths", "Events User Guide . :type Event: string :param Event: [REQUIRED] The event, in", "the target. InputPathsMap (dict) --Map of JSON paths to be extracted from the", "period of time for changes to take effect. See also: AWS API Documentation", "string :param Name: [REQUIRED] The name of the rule that you are creating", "in event patterns and rules. Be sure to use the correct ARN characters", "rule. :type RoleArn: string :param RoleArn: The Amazon Resource Name (ARN) of the", "the AWS Management Console. For some target types, PutTargets provides target-specific parameters. If", "targets from the specified rule. When the rule is triggered, those targets are", "'ScheduleExpression': 'string', 'State': 'ENABLED'|'DISABLED', 'Description': 'string', 'RoleArn': 'string' } \"\"\" pass def disable_rule(Name=None):", "triggered, those targets are no longer be invoked. When you remove a target,", "string :param Event: [REQUIRED] The event, in JSON format, to test against the", "'Arn': 'string', 'EventPattern': 'string', 'State': 'ENABLED'|'DISABLED', 'Description': 'string', 'ScheduleExpression': 'string', 'RoleArn': 'string' },", "In this case, nothing from the event itself is passed to the target.", "'rate(5 minutes)'. :type EventPattern: string :param EventPattern: The event pattern. For more information,", "one. :rtype: dict :return: { 'FailedEntryCount': 123, 'FailedEntries': [ { 'TargetId': 'string', 'ErrorCode':", "on value of the state. You can disable a rule using DisableRule .", "removed targets might continue to be invoked. Please allow a short period of", "more key-value pairs from the event and then use that data to send", "to take effect. A rule must contain at least an EventPattern or ScheduleExpression.", "path. You must use JSON dot notation, not bracket notation. (string) -- (string)", "be invoked. Please allow a short period of time for changes to take", "are made at the same time. If that happens, FailedEntryCount is non-zero in", "creating event patterns so that they match the ARN syntax in the event", "For AWS Lambda and Amazon SNS resources, CloudWatch Events relies on resource-based policies.", "Documentation :example: response = client.enable_rule( Name='string' ) :type Name: string :param Name: [REQUIRED]", "Please allow a short period of time for changes to take effect. A", "'Description': 'string', 'RoleArn': 'string' } \"\"\" pass def disable_rule(Name=None): \"\"\" Disables the specified", "EC2 instances that are to be sent the command, specified as key-value pairs.", "rule to invoke Amazon EC2 Run Command. RunCommandTargets (list) -- [REQUIRED]Currently, we support", "CloudWatch Events rules. You can either list all the rules or you can", "period of time for changes to take effect. A rule must contain at", "count to be used, if the event target is an Amazon ECS task.", "above copyright notice and this permission notice shall be included in all copies", "enabled by default, or based on value of the state. You can disable", "'string', 'RoleArn': 'string', 'Input': 'string', 'InputPath': 'string', 'InputTransformer': { 'InputPathsMap': { 'string': 'string'", "JSON paths to be extracted from the event. These are key-value pairs, where", ":param ExpiresIn: The number of seconds the presigned url is valid for. By", "event detail. Detail (string) --In the JSON sense, an object containing fields, which", "that you own, Amazon CloudWatch Events needs the appropriate permissions. For AWS Lambda", "key. 
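# --- Illustrative boto3 sketch (not part of the original stub) ---
# An InputTransformer target: InputPathsMap extracts values from the matched event with
# JSON dot notation, and InputTemplate references them by name. The template here produces
# a plain string, so it is wrapped in quotes. ARNs/names are placeholders; credentials/region
# are assumed to be configured.
import boto3

events = boto3.client('events')

events.put_targets(
    Rule='ec2-running-instances',          # placeholder rule name
    Targets=[
        {
            'Id': 'transformed-target',
            'Arn': 'arn:aws:sns:us-east-1:123456789012:my-topic',   # placeholder
            'InputTransformer': {
                'InputPathsMap': {
                    'instance': '$.detail.instance-id',
                    'state': '$.detail.state',
                },
                'InputTemplate': '"Instance <instance> changed state to <state>"',
            },
        },
    ],
)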
For more information, see Amazon Kinesis Streams Key Concepts in the Amazon", "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS", "is a list of tag values. If Key is InstanceIds , Values is", ". For more information, see Authentication and Access Control in the Amazon CloudWatch", "You must remove all targets from a rule using RemoveTargets before you can", "NextToken: The token returned by a previous call to retrieve the next set", "and so on. (dict) --Represents an event to be submitted. Time (datetime) --The", "string :param Name: [REQUIRED] The name of the rule. \"\"\" pass def describe_rule(Name=None):", "tag. (dict) --Information about the EC2 instances that are to be sent the", "Adds the specified targets to the specified rule, or updates the targets if", "}, ], 'NextToken': 'string' } \"\"\" pass def list_targets_by_rule(Rule=None, NextToken=None, Limit=None): \"\"\" Lists", "permissions. For AWS Lambda and Amazon SNS resources, CloudWatch Events relies on resource-based", "call client.get_paginator('create_foo'). \"\"\" pass def delete_rule(Name=None): \"\"\" Deletes the specified rule. You must", "of seconds the presigned url is valid for. By default it expires in", "might not immediately start matching to a newly enabled rule. Please allow a", "name of the rule that you are creating or updating. :type ScheduleExpression: string", "more information, see The JavaScript Object Notation (JSON) Data Interchange Format . InputPath", "to be extracted from the event and used as the partition key. For", "Amazon CloudWatch Events User Guide . :type State: string :param State: Indicates whether", "events might continue to match to the deleted rule. Please allow a short", "furnished to do so, subject to the following conditions: The above copyright notice", "by using the KinesisParameters argument. To invoke a command on multiple EC2 instances", "(string) -- (string) -- InputTemplate (string) -- [REQUIRED]Input template where you can use", "def delete_rule(Name=None): \"\"\" Deletes the specified rule. You must remove all targets from", "To be able to make API calls against the resources that you own,", "custom input to a target based on certain event data. You can extract", "'string' }, ] } :returns: Rule (string) -- [REQUIRED] The name of the", "-- [REQUIRED]Currently, we support including only one RunCommandTarget block, which specifies either an", "permit persons to whom the Software is furnished to do so, subject to", "the specified rule. See also: AWS API Documentation :example: response = client.list_targets_by_rule( Rule='string',", "match in event patterns and rules. Be sure to use the correct ARN", "NamePrefix: The prefix matching the rule name. :type NextToken: string :param NextToken: The", "Events rules. You can either list all the rules or you can provide", "the PutEvents call is used. Source (string) --The source of the event. Resources", "\"\"\" pass def put_targets(Rule=None, Targets=None): \"\"\" Adds the specified targets to the specified", "for changes to take effect. See also: AWS API Documentation :example: response =", "of the rule. 
:type NextToken: string :param NextToken: The token returned by a", "copies of the Software, and to permit persons to whom the Software is", "won't match any events, and won't self-trigger if it has a schedule expression.", "'TaskCount': 123 } }, ] ) :type Rule: string :param Rule: [REQUIRED] The", "(list) -- [REQUIRED]If Key is tag: tag-key , Values is a list of", "--Parameters used when you are using the rule to invoke Amazon EC2 Run", "TaskCount (integer) --The number of tasks to create based on the TaskDefinition .", ") :type Entries: list :param Entries: [REQUIRED] The entry that defines an event", "one rule triggers multiple targets, you can use a different IAM role for", "dict :param Params: The parameters normally passed to ClientMethod. :type ExpiresIn: int :param", "AWS API Documentation :example: response = client.delete_rule( Name='string' ) :type Name: string :param", "to by using the KinesisParameters argument. To invoke a command on multiple EC2", "] } :returns: Rule (string) -- [REQUIRED] The name of the rule. Targets", "''' The MIT License (MIT) Copyright (c) 2016 WavyCloud Permission is hereby granted,", "based on value of the state. You can disable a rule using DisableRule", "MIT License (MIT) Copyright (c) 2016 WavyCloud Permission is hereby granted, free of", "if the create_foo operation can be paginated, you can use the call client.get_paginator('create_foo').", "'string' } :returns: (string) -- \"\"\" pass def list_rules(NamePrefix=None, NextToken=None, Limit=None): \"\"\" Lists", "the rule. :type Ids: list :param Ids: [REQUIRED] The IDs of the targets", "See also: AWS API Documentation :example: response = client.put_rule( Name='string', ScheduleExpression='string', EventPattern='string', State='ENABLED'|'DISABLED',", "you can use to control shard assignment, when the target is an Amazon", "event data. You can extract one or more key-value pairs from the event", "--The value of the JSONPath that is used for extracting part of the", "case the rule triggers on matching events as well as on a schedule.", "CloudWatch Events uses an exact match in event patterns and rules. Be sure", "name of the rule. \"\"\" pass def enable_rule(Name=None): \"\"\" Enables the specified rule.", "'string', 'InputPath': 'string', 'InputTransformer': { 'InputPathsMap': { 'string': 'string' }, 'InputTemplate': 'string' },", "definition to use if the event target is an Amazon ECS cluster. TaskCount", "'TaskDefinitionArn': 'string', 'TaskCount': 123 } }, ] ) :type Rule: string :param Rule:", "using the KinesisParameters argument. To invoke a command on multiple EC2 instances with", "\"\"\" pass def get_paginator(operation_name=None): \"\"\" Create a paginator for an operation. :type operation_name:", "the call client.get_paginator('create_foo'). \"\"\" pass def delete_rule(Name=None): \"\"\" Deletes the specified rule. You", "and Event Patterns in the Amazon CloudWatch Events User Guide . :type State:", "of InstanceIds or a tag. (dict) --Information about the EC2 instances that are", "fields, which may also contain nested subobjects. No constraints are imposed on its", "of the rule. :type Ids: list :param Ids: [REQUIRED] The IDs of the", "match to the disabled rule. Please allow a short period of time for", "string :param Rule: [REQUIRED] The name of the rule. :type NextToken: string :param", "be invoked. When you remove a target, when the associated rule triggers, removed", "\"\"\" Generate a presigned url given a client, its method, and arguments :type", "For example, 'cron(0 20 * * ? 
*)', 'rate(5 minutes)'. :type EventPattern: string", ":param Targets: [REQUIRED] The targets to update or add to the rule. (dict)", "\"\"\" Check if an operation can be paginated. :type operation_name: string :param operation_name:", "target and the error code. See also: AWS API Documentation :example: response =", "is the same name as the method name on the client. For example,", "tasks to create based on the TaskDefinition . The default is one. \"\"\"", "you disable a rule, incoming events might continue to match to the disabled", "argument. To invoke a command on multiple EC2 instances with one rule, you", "matching to a newly enabled rule. Please allow a short period of time", "Name: [REQUIRED] The name of the rule that you are creating or updating.", "create_foo operation can be paginated, you can use the call client.get_paginator('create_foo'). :rtype: L{botocore.paginate.Paginator}", "def remove_targets(Rule=None, Ids=None): \"\"\" Removes the specified targets from the specified rule. When", "specify several parameters for the entry such as the source and type of", "{ 'TargetId': 'string', 'ErrorCode': 'string', 'ErrorMessage': 'string' }, ] } \"\"\" pass def", ", InputPath , or InputTransformer , you must use JSON dot notation, not", "or a tag. (dict) --Information about the EC2 instances that are to be", "relies on IAM roles that you specify in the RoleARN argument in PutTarget", "[REQUIRED] The name of the rule. Targets (list) -- [REQUIRED] The targets to", "Limit=None): \"\"\" Lists your Amazon CloudWatch Events rules. You can either list all", "given a client, its method, and arguments :type ClientMethod: string :param ClientMethod: The", "the rules in Amazon CloudWatch Events can invoke a specific target in your", "or updates the targets if they are already associated with the rule. Targets", "of this software and associated documentation files (the \"Software\"), to deal in the", "''' def can_paginate(operation_name=None): \"\"\" Check if an operation can be paginated. :type operation_name:", "to the deleted rule. Please allow a short period of time for changes", "The number of seconds the presigned url is valid for. By default it", "[REQUIRED] The Amazon Resource Name (ARN) of the target resource. :type NextToken: string", "client.put_targets( Rule='string', Targets=[ { 'Id': 'string', 'Arn': 'string', 'RoleArn': 'string', 'Input': 'string', 'InputPath':", ":param EventPattern: [REQUIRED] The event pattern. For more information, see Events and Event", "sell copies of the Software, and to permit persons to whom the Software", "Deletes the specified rule. You must remove all targets from a rule using", "(dict) --Targets are the resources to be invoked when a rule is triggered.", "(list) --AWS resources, identified by Amazon Resource Name (ARN), which the event primarily", "client.test_event_pattern( EventPattern='string', Event='string' ) :type EventPattern: string :param EventPattern: [REQUIRED] The event pattern.", "multiple EC2 instances with one rule, you can use the RunCommandParameters field. To", "present. (string) -- DetailType (string) --Free-form string used to decide what fields to", "list_rules(NamePrefix=None, NextToken=None, Limit=None): \"\"\" Lists your Amazon CloudWatch Events rules. You can either", "the ARN syntax in the event you want to match. See also: AWS", "Rule: string :param Rule: [REQUIRED] The name of the rule. :type NextToken: string", "disabled. :type Description: string :param Description: A description of the rule. 
:type RoleArn:", "tag: tag-key or InstanceIds . Values (list) -- [REQUIRED]If Key is tag: tag-key", ":example: response = client.enable_rule( Name='string' ) :type Name: string :param Name: [REQUIRED] The", "\"\"\" Deletes the specified rule. You must remove all targets from a rule", "be either tag: tag-key or InstanceIds . Values (list) -- [REQUIRED]If Key is", "Key Concepts in the Amazon Kinesis Streams Developer Guide . RunCommandParameters (dict) --Parameters", "not immediately start matching to a newly enabled rule. Please allow a short", "event, per RFC3339 . If no timestamp is provided, the timestamp of the", "rule, you can use the RunCommandParameters field. To be able to make API", "--The custom parameter you can use to control shard assignment, when the target", "and used as the partition key. For more information, see Amazon Kinesis Streams", "--The source of the event. Resources (list) --AWS resources, identified by Amazon Resource", "the rule. :type Targets: list :param Targets: [REQUIRED] The targets to update or", "def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None): \"\"\" Generate a presigned url given a client,", "Name='string', ScheduleExpression='string', EventPattern='string', State='ENABLED'|'DISABLED', Description='string', RoleArn='string' ) :type Name: string :param Name: [REQUIRED]", "the client. For example, if the method name is create_foo, and you'd normally", "The token returned by a previous call to retrieve the next set of", "[REQUIRED]Can be either tag: tag-key or InstanceIds . Values (list) -- [REQUIRED]If Key", "IAM role associated with the rule. :rtype: dict :return: { 'RuleArn': 'string' }", "be used for this target when the rule is triggered. If one rule", "Name: [REQUIRED] The name of the rule. \"\"\" pass def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None,", "can optionally specify which shard the event goes to by using the KinesisParameters", "ClientMethod: The client method to presign for :type Params: dict :param Params: The", "(string) -- [REQUIRED]Input template where you can use the values of the keys", "key-value pairs from the event and then use that data to send customized", "of the rules in Amazon CloudWatch Events can invoke a specific target in", "string :param Rule: [REQUIRED] The name of the rule. :type Targets: list :param", "for the entry such as the source and type of the event, resources", ", you must use JSON dot notation, not bracket notation. When you add", "to return. :rtype: dict :return: { 'RuleNames': [ 'string', ], 'NextToken': 'string' }", "Description: A description of the rule. :type RoleArn: string :param RoleArn: The Amazon", "is to use the eventId as the partition key. PartitionKeyPath (string) -- [REQUIRED]The", "of the Software, and to permit persons to whom the Software is furnished", "API Documentation :example: response = client.list_rule_names_by_target( TargetArn='string', NextToken='string', Limit=123 ) :type TargetArn: string", "continue to match to the deleted rule. Please allow a short period of", "stream, you can optionally specify which shard the event goes to by using", "}, 'KinesisParameters': { 'PartitionKeyPath': 'string' }, 'RunCommandParameters': { 'RunCommandTargets': [ { 'Key': 'string',", "{ 'PartitionKeyPath': 'string' }, 'RunCommandParameters': { 'RunCommandTargets': [ { 'Key': 'string', 'Values': [", "Limit: The maximum number of results to return. :rtype: dict :return: { 'Rules':", "rules. 
Be sure to use the correct ARN characters when creating event patterns", "{ 'TaskDefinitionArn': 'string', 'TaskCount': 123 } }, ], 'NextToken': 'string' } :returns: (string)", "API Documentation :example: response = client.enable_rule( Name='string' ) :type Name: string :param Name:", "Creates or updates the specified rule. Rules are enabled by default, or based", "be matched to rules. See also: AWS API Documentation :example: response = client.put_events(", "in Amazon Resource Names (ARNs). However, CloudWatch Events uses an exact match in", "copyright notice and this permission notice shall be included in all copies or", "-- [REQUIRED]Can be either tag: tag-key or InstanceIds . Values (list) -- [REQUIRED]If", ". When you specify Input , InputPath , or InputTransformer , you must", "for changes to take effect. A rule must contain at least an EventPattern", "add targets to a rule and the associated rule triggers soon after, new", "associated with the event, and so on. (dict) --Represents an event to be", "matched to rules. See also: AWS API Documentation :example: response = client.put_events( Entries=[", "contain at least an EventPattern or ScheduleExpression. Rules with EventPatterns are triggered when", "tasks, AWS Step Functions state machines, and built-in targets. Note that creating rules", "to a newly enabled rule. Please allow a short period of time for", "ScheduleExpression, in which case the rule triggers on matching events as well as", "PutEvents call is used. Source (string) --The source of the event. Resources (list)", "based on the TaskDefinition . The default is one. \"\"\" pass def remove_targets(Rule=None,", "the RoleARN argument in PutTarget . For more information, see Authentication and Access", "API Documentation :example: response = client.test_event_pattern( EventPattern='string', Event='string' ) :type EventPattern: string :param", "that they match the ARN syntax in the event you want to match.", "(string) -- DetailType (string) --Free-form string used to decide what fields to expect", "def test_event_pattern(EventPattern=None, Event=None): \"\"\" Tests whether the specified event pattern matches the provided", "are using the rule to invoke Amazon EC2 Run Command. RunCommandTargets (list) --", "this key may specify multiple values. Key (string) -- [REQUIRED]Can be either tag:", "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR", "use the eventId as the partition key. PartitionKeyPath (string) -- [REQUIRED]The JSON path", "the entry such as the source and type of the event, resources associated", "specified as key-value pairs. Each RunCommandTarget block can include only one key, but", "paginated, you can use the call client.get_paginator('create_foo'). :rtype: L{botocore.paginate.Paginator} \"\"\" pass def get_waiter():", "prefix matching the rule name. :type NextToken: string :param NextToken: The token returned", "SNS resources, CloudWatch Events relies on resource-based policies. For EC2 instances, Amazon Kinesis", "By default, the http method is whatever is used in the method's model.", "Amazon Kinesis stream, you can optionally specify which shard the event goes to", "rule using RemoveTargets before you can delete the rule. When you delete a", "fails. When you enable a rule, incoming events might not immediately start matching", "or based on value of the state. You can disable a rule using", ":type Rule: string :param Rule: [REQUIRED] The name of the rule. :type Ids:", "the specified rule. 
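# --- Illustrative boto3 sketch (not part of the original stub) ---
# TestEventPattern checks a pattern against a complete event, so a full event envelope
# is supplied here (id, source, account, time, region, resources, detail). All values
# are placeholders; credentials/region are assumed to be configured.
import json
import boto3

events = boto3.client('events')

pattern = json.dumps({'source': ['aws.ec2']})
event = json.dumps({
    'id': '12345678-1234-1234-1234-123456789012',
    'detail-type': 'EC2 Instance State-change Notification',
    'source': 'aws.ec2',
    'account': '123456789012',
    'time': '2021-01-01T00:00:00Z',
    'region': 'us-east-1',
    'resources': ['arn:aws:ec2:us-east-1:123456789012:instance/i-0123456789abcdef0'],
    'detail': {'state': 'running'},
})
print(events.test_event_pattern(EventPattern=pattern, Event=event)['Result'])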
You must remove all targets from a rule using RemoveTargets", "of results to return. :rtype: dict :return: { 'RuleNames': [ 'string', ], 'NextToken':", "specify Input , InputPath , or InputTransformer , you must use JSON dot", ". The default is one. :rtype: dict :return: { 'FailedEntryCount': 123, 'FailedEntries': [", "might continue to match to the deleted rule. Please allow a short period", "match any events, and won't self-trigger if it has a schedule expression. When", "pass def remove_targets(Rule=None, Ids=None): \"\"\" Removes the specified targets from the specified rule.", "use the call client.get_paginator('create_foo'). :rtype: L{botocore.paginate.Paginator} \"\"\" pass def get_waiter(): \"\"\" \"\"\" pass", "NextToken='string', Limit=123 ) :type TargetArn: string :param TargetArn: [REQUIRED] The Amazon Resource Name", "Events and Event Patterns in the Amazon CloudWatch Events User Guide . :type", "Object Notation (JSON) Data Interchange Format . InputPath (string) --The value of the", "paginated. :type operation_name: string :param operation_name: The operation name. This is the same", "dict :return: { 'RuleNames': [ 'string', ], 'NextToken': 'string' } :returns: (string) --", "a list of Amazon EC2 instance IDs. (string) -- EcsParameters (dict) --Contains the", "See also: AWS API Documentation :example: response = client.list_rules( NamePrefix='string', NextToken='string', Limit=123 )", "to customize the data sent to the target. KinesisParameters (dict) --The custom parameter", "match the ARN syntax in the event you want to match. See also:", "'string' } \"\"\" pass def put_targets(Rule=None, Targets=None): \"\"\" Adds the specified targets to", "invoked when a rule is triggered. Target types include EC2 instances, AWS Lambda", "take effect. See also: AWS API Documentation :example: response = client.disable_rule( Name='string' )", "distribute, sublicense, and/or sell copies of the Software, and to permit persons to", "software and associated documentation files (the \"Software\"), to deal in the Software without", "on multiple EC2 instances with one rule, you can use the RunCommandParameters field.", "resources that you own, Amazon CloudWatch Events needs the appropriate permissions. For AWS", "] ) :type Entries: list :param Entries: [REQUIRED] The entry that defines an", "dot notation, not bracket notation. (string) -- (string) -- InputTemplate (string) -- [REQUIRED]Input", "20 * * ? *)', 'rate(5 minutes)'. :type EventPattern: string :param EventPattern: The", "the specified rule. If the rule does not exist, the operation fails. When", "of the rule. :rtype: dict :return: { 'Name': 'string', 'Arn': 'string', 'EventPattern': 'string',", "pass def disable_rule(Name=None): \"\"\" Disables the specified rule. A disabled rule won't match", "default is one. :rtype: dict :return: { 'FailedEntryCount': 123, 'FailedEntries': [ { 'TargetId':", "patterns and rules. Be sure to use the correct ARN characters when creating", "from the specified rule. When the rule is triggered, those targets are no", "with the event, and so on. (dict) --Represents an event to be submitted.", "shall be included in all copies or substantial portions of the Software. THE", ", Values is a list of tag values. If Key is InstanceIds ,", "response = client.list_rules( NamePrefix='string', NextToken='string', Limit=123 ) :type NamePrefix: string :param NamePrefix: The", "provide custom input to a target based on certain event data. You can", "operation can be paginated. 
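# --- Illustrative boto3 sketch (not part of the original stub) ---
# Deleting a rule requires removing its targets first, as stated above. The rule name
# is a placeholder; credentials/region are assumed to be configured.
import boto3

events = boto3.client('events')
rule_name = 'my-rule'            # placeholder rule name

# Remove any remaining targets, then delete the rule itself.
target_ids = [t['Id'] for t in events.list_targets_by_rule(Rule=rule_name).get('Targets', [])]
if target_ids:
    events.remove_targets(Rule=rule_name, Ids=target_ids)
events.delete_rule(Name=rule_name)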
:type operation_name: string :param operation_name: The operation name. This", "is one. \"\"\" pass def remove_targets(Rule=None, Ids=None): \"\"\" Removes the specified targets from", "NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,", "If one rule triggers multiple targets, you can use a different IAM role", "--Settings to enable you to provide custom input to a target based on", "\"\"\" pass def describe_rule(Name=None): \"\"\" Describes the specified rule. See also: AWS API", "Name (ARN) of the target resource. :type NextToken: string :param NextToken: The token", "Definitions in the Amazon EC2 Container Service Developer Guide . TaskDefinitionArn (string) --", "'string', 'EventPattern': 'string', 'ScheduleExpression': 'string', 'State': 'ENABLED'|'DISABLED', 'Description': 'string', 'RoleArn': 'string' } \"\"\"", "an EventPattern or ScheduleExpression. Rules with EventPatterns are triggered when a matching event", "must use JSON dot notation, not bracket notation. (string) -- (string) -- InputTemplate", "] }, 'EcsParameters': { 'TaskDefinitionArn': 'string', 'TaskCount': 123 } }, ], 'NextToken': 'string'", "a different IAM role for each target. Input (string) --Valid JSON text passed", "\"\"\" Lists the targets assigned to the specified rule. See also: AWS API", "EventPattern='string', Event='string' ) :type EventPattern: string :param EventPattern: [REQUIRED] The event pattern. For", "[ 'string', ], 'NextToken': 'string' } :returns: (string) -- \"\"\" pass def list_rules(NamePrefix=None,", "is an Amazon Kinesis stream. If you do not include this parameter, the", "Kinesis stream. If you do not include this parameter, the default is to", "[REQUIRED] The name of the rule. :type Ids: list :param Ids: [REQUIRED] The", "event, in JSON format, to test against the event pattern. :rtype: dict :return:", "to take effect. See also: AWS API Documentation :example: response = client.disable_rule( Name='string'", "Entries=[ { 'Time': datetime(2015, 1, 1), 'Source': 'string', 'Resources': [ 'string', ], 'DetailType':", ":return: { 'RuleArn': 'string' } \"\"\" pass def put_targets(Rule=None, Targets=None): \"\"\" Adds the", ":param Params: The parameters normally passed to ClientMethod. :type ExpiresIn: int :param ExpiresIn:", "The name of the rule. :type Ids: list :param Ids: [REQUIRED] The IDs", "an operation. :type operation_name: string :param operation_name: The operation name. This is the", "that is used for extracting part of the matched event when passing it", "on the client. For example, if the method name is create_foo, and you'd", "The maximum number of results to return. :rtype: dict :return: { 'Targets': [", "based on certain event data. You can extract one or more key-value pairs", "you can use a different IAM role for each target. Input (string) --Valid", "is hereby granted, free of charge, to any person obtaining a copy of", "A rule can have both an EventPattern and a ScheduleExpression, in which case", "rule. Please allow a short period of time for changes to take effect.", "remove all targets from a rule using RemoveTargets before you can delete the", "action can partially fail if too many requests are made at the same", ":rtype: L{botocore.paginate.Paginator} \"\"\" pass def get_waiter(): \"\"\" \"\"\" pass def list_rule_names_by_target(TargetArn=None, NextToken=None, Limit=None):", "does not exist, the operation fails. When you enable a rule, incoming events", "Detail (string) --In the JSON sense, an object containing fields, which may also", "the rule. 
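Illustrative sketch (not part of the generated stub): the clean-up order described for DeleteRule, removing every target from a rule before deleting it. The rule name is a placeholder and a single page of targets is assumed.

import boto3

client = boto3.client('events')
rule_name = 'my-app-nightly-rule'  # placeholder

# a rule cannot be deleted while it still has targets, so list and remove them first
targets = client.list_targets_by_rule(Rule=rule_name).get('Targets', [])
if targets:
    client.remove_targets(Rule=rule_name, Ids=[t['Id'] for t in targets])

client.delete_rule(Name=rule_name)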
\"\"\" pass def describe_rule(Name=None): \"\"\" Describes the specified rule. See also:", "IAM role to be used for this target when the rule is triggered.", "JSON paths, see JSONPath . InputTransformer (dict) --Settings to enable you to provide", "return. :rtype: dict :return: { 'Targets': [ { 'Id': 'string', 'Arn': 'string', 'RoleArn':", "API calls against the resources that you own, Amazon CloudWatch Events needs the", "some target types, PutTargets provides target-specific parameters. If the target is an Amazon", "(string) --The value of the JSONPath that is used for extracting part of", "[REQUIRED] The IDs of the targets to remove from the rule. (string) --", "short period of time for changes to take effect. A rule must contain", "delete_rule(Name=None): \"\"\" Deletes the specified rule. You must remove all targets from a", "match. See also: AWS API Documentation :example: response = client.put_rule( Name='string', ScheduleExpression='string', EventPattern='string',", "triggered. Target types include EC2 instances, AWS Lambda functions, Amazon Kinesis streams, Amazon", "You must use JSON dot notation, not bracket notation. For more information, see", "short period of time for changes to take effect. See also: AWS API", "event. These are key-value pairs, where each value is a JSON path. You", "immediately start matching to new or updated rules. Please allow a short period", "OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS", "(string) --The Amazon Resource Name (ARN) of the IAM role to be used", "is enabled or disabled. :type Description: string :param Description: A description of the", "names. See also: AWS API Documentation :example: response = client.list_rules( NamePrefix='string', NextToken='string', Limit=123", "'Targets': [ { 'Id': 'string', 'Arn': 'string', 'RoleArn': 'string', 'Input': 'string', 'InputPath': 'string',", "Lists your Amazon CloudWatch Events rules. You can either list all the rules", "the rule. When you delete a rule, incoming events might continue to match", "events to Amazon CloudWatch Events so that they can be matched to rules.", "from the event. These are key-value pairs, where each value is a JSON", "to decide what fields to expect in the event detail. Detail (string) --In", "specified rule. See also: AWS API Documentation :example: response = client.describe_rule( Name='string' )", "also contain nested subobjects. No constraints are imposed on its contents. :rtype: dict", "from the event and used as the partition key. For more information, see", "are imposed on its contents. :rtype: dict :return: { 'FailedEntryCount': 123, 'Entries': [", "event patterns so that they match the ARN syntax in the event you", "data to send customized input to the target. InputPathsMap (dict) --Map of JSON", ":example: response = client.delete_rule( Name='string' ) :type Name: string :param Name: [REQUIRED] The", "ExpiresIn=None, HttpMethod=None): \"\"\" Generate a presigned url given a client, its method, and", "invoke Amazon EC2 Run Command. RunCommandTargets (list) -- [REQUIRED]Currently, we support including only", "the RunCommandParameters field. To be able to make API calls against the resources", "event and used as the partition key. For more information, see Amazon Kinesis", "Kinesis Streams Key Concepts in the Amazon Kinesis Streams Developer Guide . RunCommandParameters", "Limit=123 ) :type TargetArn: string :param TargetArn: [REQUIRED] The Amazon Resource Name (ARN)", "an Amazon ECS task. 
For more information about Amazon ECS tasks, see Task", "'TargetId': 'string', 'ErrorCode': 'string', 'ErrorMessage': 'string' }, ] } \"\"\" pass def test_event_pattern(EventPattern=None,", "event you want to match. See also: AWS API Documentation :example: response =", "of the event, resources associated with the event, and so on. (dict) --Represents", "'FailedEntryCount': 123, 'FailedEntries': [ { 'TargetId': 'string', 'ErrorCode': 'string', 'ErrorMessage': 'string' }, ]", "} :returns: Rule (string) -- [REQUIRED] The name of the rule. Targets (list)", "http method is whatever is used in the method's model. \"\"\" pass def", "a rule is triggered. Example targets include EC2 instances, AWS Lambda functions, Amazon", "client.list_targets_by_rule( Rule='string', NextToken='string', Limit=123 ) :type Rule: string :param Rule: [REQUIRED] The name", "to match to the deleted rule. Please allow a short period of time", "updating. :type ScheduleExpression: string :param ScheduleExpression: The scheduling expression. For example, 'cron(0 20", "notation. (string) -- (string) -- InputTemplate (string) -- [REQUIRED]Input template where you can", "of results. :type Limit: integer :param Limit: The maximum number of results to", "rule triggers on matching events as well as on a schedule. Most services", "Params=None, ExpiresIn=None, HttpMethod=None): \"\"\" Generate a presigned url given a client, its method,", "the call client.get_paginator('create_foo'). :rtype: L{botocore.paginate.Paginator} \"\"\" pass def get_waiter(): \"\"\" \"\"\" pass def", "ClientMethod: string :param ClientMethod: The client method to presign for :type Params: dict", "such as the source and type of the event, resources associated with the", "we support including only one RunCommandTarget block, which specifies either an array of", "Rule: [REQUIRED] The name of the rule. :type NextToken: string :param NextToken: The", "NamePrefix='string', NextToken='string', Limit=123 ) :type NamePrefix: string :param NamePrefix: The prefix matching the", "as the source and type of the event, resources associated with the event,", "the partition key. For more information, see Amazon Kinesis Streams Key Concepts in", "\"\"\" Describes the specified rule. See also: AWS API Documentation :example: response =", "to test against the event pattern. :rtype: dict :return: { 'Result': True|False }", "'string', 'Arn': 'string', 'RoleArn': 'string', 'Input': 'string', 'InputPath': 'string', 'InputTransformer': { 'InputPathsMap': {", "of the task definition to use if the event target is an Amazon", "Guide . TaskDefinitionArn (string) -- [REQUIRED]The ARN of the task definition to use", "merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit", "CloudWatch Events User Guide . :type Event: string :param Event: [REQUIRED] The event,", "= client.enable_rule( Name='string' ) :type Name: string :param Name: [REQUIRED] The name of", "(string) -- [REQUIRED] The name of the rule. Targets (list) -- [REQUIRED] The", "a short period of time for changes to take effect. A rule must", "operation as client.create_foo(**kwargs), if the create_foo operation can be paginated, you can use", "'RoleArn': 'string', 'Input': 'string', 'InputPath': 'string', 'InputTransformer': { 'InputPathsMap': { 'string': 'string' },", "that they can be matched to rules. 
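Illustrative sketch (not part of the generated stub): publishing a custom event with put_events and checking FailedEntryCount, since the call can partially fail. The source and detail-type strings are placeholders.

import json
import boto3

client = boto3.client('events')

response = client.put_events(
    Entries=[
        {
            'Source': 'com.example.orders',   # placeholder event source
            'DetailType': 'OrderPlaced',      # placeholder detail type
            'Detail': json.dumps({'orderId': '1234', 'amount': 42}),
            'Resources': [],                  # optional ARNs the event concerns
        },
    ]
)

if response['FailedEntryCount']:
    # failed entries carry an ErrorCode/ErrorMessage instead of an EventId
    for entry in response['Entries']:
        if 'ErrorCode' in entry:
            print('failed:', entry['ErrorCode'], entry.get('ErrorMessage'))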
See also: AWS API Documentation :example:", "Most services in AWS treat : or / as the same character in", "int :param ExpiresIn: The number of seconds the presigned url is valid for.", "--Information about the EC2 instances that are to be sent the command, specified", "'EventPattern': 'string', 'State': 'ENABLED'|'DISABLED', 'Description': 'string', 'ScheduleExpression': 'string', 'RoleArn': 'string' }, ], 'NextToken':", "Documentation :example: response = client.disable_rule( Name='string' ) :type Name: string :param Name: [REQUIRED]", "deleted rule. Please allow a short period of time for changes to take", "Name: [REQUIRED] The name of the rule. \"\"\" pass def enable_rule(Name=None): \"\"\" Enables", "a rule, incoming events might not immediately start matching to new or updated", "only in the AWS Management Console. For some target types, PutTargets provides target-specific", "'RoleArn': 'string' }, ], 'NextToken': 'string' } \"\"\" pass def list_targets_by_rule(Rule=None, NextToken=None, Limit=None):", "User Guide . :type State: string :param State: Indicates whether the rule is", "[ { 'Id': 'string', 'Arn': 'string', 'RoleArn': 'string', 'Input': 'string', 'InputPath': 'string', 'InputTransformer':", "2016 WavyCloud Permission is hereby granted, free of charge, to any person obtaining", "the event detail. Detail (string) --In the JSON sense, an object containing fields,", "See also: AWS API Documentation :example: response = client.describe_rule( Name='string' ) :type Name:", "'string', 'TaskCount': 123 } }, ], 'NextToken': 'string' } :returns: (string) -- (string)", "immediately invoked. Please allow a short period of time for changes to take", "Events can invoke a specific target in your account. See also: AWS API", "= client.list_targets_by_rule( Rule='string', NextToken='string', Limit=123 ) :type Rule: string :param Rule: [REQUIRED] The", "EventPattern=None, State=None, Description=None, RoleArn=None): \"\"\" Creates or updates the specified rule. Rules are", "-- (string) -- \"\"\" pass def put_events(Entries=None): \"\"\" Sends custom events to Amazon", "the Amazon EC2 Container Service Developer Guide . TaskDefinitionArn (string) -- [REQUIRED]The ARN", "ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT", "put_events(Entries=None): \"\"\" Sends custom events to Amazon CloudWatch Events so that they can", "Input (string) --Valid JSON text passed to the target. In this case, nothing", "Name (ARN), which the event primarily concerns. Any number, including zero, may be", "of Amazon EC2 instance IDs. (string) -- EcsParameters (dict) --Contains the Amazon ECS", "'string', 'ErrorMessage': 'string' }, ] } \"\"\" pass def test_event_pattern(EventPattern=None, Event=None): \"\"\" Tests", "a command on multiple EC2 instances with one rule, you can use the", "or you can provide a prefix to match to the rule names. See", "WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT", "can have both an EventPattern and a ScheduleExpression, in which case the rule", "LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF", ":param ScheduleExpression: The scheduling expression. For example, 'cron(0 20 * * ? *)',", "def put_rule(Name=None, ScheduleExpression=None, EventPattern=None, State=None, Description=None, RoleArn=None): \"\"\" Creates or updates the specified", "The default is one. 
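Illustrative sketch (not part of the generated stub): attaching a Lambda target with an InputTransformer, which extracts fields from the matched event via JSON paths and substitutes them into a template, as described above. The rule name and function ARN are placeholders.

import boto3

client = boto3.client('events')

response = client.put_targets(
    Rule='my-app-state-change-rule',   # placeholder rule name
    Targets=[
        {
            'Id': 'notify-lambda',
            'Arn': 'arn:aws:lambda:us-east-1:123456789012:function:notify',  # placeholder ARN
            'InputTransformer': {
                # JSON paths extracted from the matched event (dot notation)
                'InputPathsMap': {'instance': '$.detail.instance-id', 'state': '$.detail.state'},
                # template the extracted values are substituted into
                'InputTemplate': '"Instance <instance> changed state to <state>"',
            },
        },
    ]
)

# PutTargets can partially fail; inspect FailedEntries for per-target error codes
for failure in response.get('FailedEntries', []):
    print(failure['TargetId'], failure['ErrorCode'])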
\"\"\" pass def remove_targets(Rule=None, Ids=None): \"\"\" Removes the specified", "TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE", "stream. If you do not include this parameter, the default is to use", "API Documentation :example: response = client.disable_rule( Name='string' ) :type Name: string :param Name:", "that you are creating or updating. :type ScheduleExpression: string :param ScheduleExpression: The scheduling", "the rule to invoke Amazon EC2 Run Command. RunCommandTargets (list) -- [REQUIRED]Currently, we", "an array of InstanceIds or a tag. (dict) --Information about the EC2 instances", "charge, to any person obtaining a copy of this software and associated documentation", "command, specified as key-value pairs. Each RunCommandTarget block can include only one key,", "when creating event patterns so that they match the ARN syntax in the", "KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,", ":return: { 'Targets': [ { 'Id': 'string', 'Arn': 'string', 'RoleArn': 'string', 'Input': 'string',", "of results to return. :rtype: dict :return: { 'Rules': [ { 'Name': 'string',", "get_waiter(): \"\"\" \"\"\" pass def list_rule_names_by_target(TargetArn=None, NextToken=None, Limit=None): \"\"\" Lists the rules for", "and the associated rule triggers soon after, new or updated targets might not", "bracket notation. For more information about JSON paths, see JSONPath . InputTransformer (dict)", "*)', 'rate(5 minutes)'. :type EventPattern: string :param EventPattern: The event pattern. For more", "] ) :type Rule: string :param Rule: [REQUIRED] The name of the rule.", "the JSONPath that is used for extracting part of the matched event when", "the specified rule. When the rule is triggered, those targets are no longer", "specified targets from the specified rule. When the rule is triggered, those targets", "Amazon ECS cluster. TaskCount (integer) --The number of tasks to create based on", "'DetailType': 'string', 'Detail': 'string' }, ] ) :type Entries: list :param Entries: [REQUIRED]", "{ 'Name': 'string', 'Arn': 'string', 'EventPattern': 'string', 'ScheduleExpression': 'string', 'State': 'ENABLED'|'DISABLED', 'Description': 'string',", "The Amazon Resource Name (ARN) of the IAM role associated with the rule.", "the rule that you are creating or updating. :type ScheduleExpression: string :param ScheduleExpression:", "you'd normally invoke the operation as client.create_foo(**kwargs), if the create_foo operation can be", "a short period of time for changes to take effect. This action can", "Arn (string) -- [REQUIRED]The Amazon Resource Name (ARN) of the target. RoleArn (string)", "name of the rule. :type Ids: list :param Ids: [REQUIRED] The IDs of", "persons to whom the Software is furnished to do so, subject to the", "the specified rule. Rules are enabled by default, or based on value of", "to match. See also: AWS API Documentation :example: response = client.test_event_pattern( EventPattern='string', Event='string'", "take effect. See also: AWS API Documentation :example: response = client.delete_rule( Name='string' )", "JavaScript Object Notation (JSON) Data Interchange Format . InputPath (string) --The value of", "IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY", "you delete a rule, incoming events might continue to match to the deleted", "ScheduleExpressions self-trigger based on the given schedule. 
A rule can have both an", "all the rules or you can provide a prefix to match to the", "calls against the resources that you own, Amazon CloudWatch Events needs the appropriate", "results to return. :rtype: dict :return: { 'Rules': [ { 'Name': 'string', 'Arn':", "NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND", "dict :return: { 'RuleArn': 'string' } \"\"\" pass def put_targets(Rule=None, Targets=None): \"\"\" Adds", "specifies either an array of InstanceIds or a tag. (dict) --Information about the", "string :param HttpMethod: The http method to use on the generated url. By", "Targets: list :param Targets: [REQUIRED] The targets to update or add to the", "presigned url given a client, its method, and arguments :type ClientMethod: string :param", "\"\"\" Removes the specified targets from the specified rule. When the rule is", "rule can have both an EventPattern and a ScheduleExpression, in which case the", "string :param Rule: [REQUIRED] The name of the rule. :type Ids: list :param", "the specified target. You can see which of the rules in Amazon CloudWatch", "Guide . When you specify Input , InputPath , or InputTransformer , you", "source of the event. Resources (list) --AWS resources, identified by Amazon Resource Name", "exact match in event patterns and rules. Be sure to use the correct", "token returned by a previous call to retrieve the next set of results.", "can use the RunCommandParameters field. To be able to make API calls against", "ARN of the task definition to use if the event target is an", "Resources (list) --AWS resources, identified by Amazon Resource Name (ARN), which the event", ":example: response = client.put_rule( Name='string', ScheduleExpression='string', EventPattern='string', State='ENABLED'|'DISABLED', Description='string', RoleArn='string' ) :type Name:", "to whom the Software is furnished to do so, subject to the following", "exist, the operation fails. When you enable a rule, incoming events might not", "you want to match. See also: AWS API Documentation :example: response = client.test_event_pattern(", ":type Event: string :param Event: [REQUIRED] The event, in JSON format, to test", "See also: AWS API Documentation :example: response = client.remove_targets( Rule='string', Ids=[ 'string', ]", "'FailedEntries': [ { 'TargetId': 'string', 'ErrorCode': 'string', 'ErrorMessage': 'string' }, ] } \"\"\"", "requests are made at the same time. If that happens, FailedEntryCount is non-zero", "example, 'cron(0 20 * * ? *)', 'rate(5 minutes)'. :type EventPattern: string :param", "\"\"\" pass def delete_rule(Name=None): \"\"\" Deletes the specified rule. You must remove all", "name of the rule. :type NextToken: string :param NextToken: The token returned by", "role for each target. Input (string) --Valid JSON text passed to the target.", "for each target. Input (string) --Valid JSON text passed to the target. In", "in the Software without restriction, including without limitation the rights to use, copy,", "[ 'string', ], 'DetailType': 'string', 'Detail': 'string' }, ] ) :type Entries: list", "can_paginate(operation_name=None): \"\"\" Check if an operation can be paginated. :type operation_name: string :param", ") :type Name: string :param Name: [REQUIRED] The name of the rule that", "Description=None, RoleArn=None): \"\"\" Creates or updates the specified rule. Rules are enabled by", "Amazon CloudWatch Events rules. You can either list all the rules or you", "default, the http method is whatever is used in the method's model. 
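Illustrative sketch (not part of the generated stub) for the put_rule and test_event_pattern calls documented in this module: validate an event pattern against a sample event, then create or update a rule with it. Account ID, ARNs, and names are placeholders.

import json
import boto3

client = boto3.client('events')

pattern = {'source': ['aws.ec2'], 'detail-type': ['EC2 Instance State-change Notification']}

# sample event in the standard envelope, used only to exercise the pattern
sample_event = {
    'id': '12345678-90ab-cdef-1234-567890abcdef',
    'detail-type': 'EC2 Instance State-change Notification',
    'source': 'aws.ec2',
    'account': '123456789012',                     # placeholder account id
    'time': '2016-01-01T00:00:00Z',
    'region': 'us-east-1',
    'resources': ['arn:aws:ec2:us-east-1:123456789012:instance/i-abcd1111'],
    'detail': {'instance-id': 'i-abcd1111', 'state': 'running'},
}

check = client.test_event_pattern(EventPattern=json.dumps(pattern), Event=json.dumps(sample_event))
if check['Result']:
    rule = client.put_rule(
        Name='ec2-state-change',                   # placeholder rule name
        EventPattern=json.dumps(pattern),
        State='ENABLED',
        Description='Matches EC2 instance state-change events',
    )
    print(rule['RuleArn'])

# a scheduled rule uses ScheduleExpression instead, e.g.
# client.put_rule(Name='nightly', ScheduleExpression='cron(0 20 * * ? *)', State='ENABLED')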
\"\"\"", "list :param Ids: [REQUIRED] The IDs of the targets to remove from the", "'Detail': 'string' }, ] ) :type Entries: list :param Entries: [REQUIRED] The entry", ":type ScheduleExpression: string :param ScheduleExpression: The scheduling expression. For example, 'cron(0 20 *", "its contents. :rtype: dict :return: { 'FailedEntryCount': 123, 'Entries': [ { 'EventId': 'string',", "assignment, when the target is an Amazon Kinesis stream. If you do not", "-- [REQUIRED] The name of the rule. Targets (list) -- [REQUIRED] The targets", "JSON text passed to the target. In this case, nothing from the event", "soon after, new or updated targets might not be immediately invoked. Please allow", "might not be immediately invoked. Please allow a short period of time for", "EventPattern or ScheduleExpression. Rules with EventPatterns are triggered when a matching event is", "to use the correct ARN characters when creating event patterns so that they", "CloudWatch Events can invoke a specific target in your account. See also: AWS", "dict :return: { 'Rules': [ { 'Name': 'string', 'Arn': 'string', 'EventPattern': 'string', 'State':", "use JSON dot notation, not bracket notation. (string) -- (string) -- InputTemplate (string)", "rule names. See also: AWS API Documentation :example: response = client.list_rules( NamePrefix='string', NextToken='string',", "rule triggers soon after, new or updated targets might not be immediately invoked.", "able to make API calls against the resources that you own, Amazon CloudWatch", "if the event target is an Amazon ECS cluster. TaskCount (integer) --The number", "the rule. :type NextToken: string :param NextToken: The token returned by a previous", "State='ENABLED'|'DISABLED', Description='string', RoleArn='string' ) :type Name: string :param Name: [REQUIRED] The name of", "disabled rule. Please allow a short period of time for changes to take", "that creating rules with built-in targets is supported only in the AWS Management", "--The Amazon Resource Name (ARN) of the IAM role to be used for", "'string', ] }, ] }, 'EcsParameters': { 'TaskDefinitionArn': 'string', 'TaskCount': 123 } },", "non-zero in the response and each entry in FailedEntries provides the ID of", "notation. For more information about JSON paths, see JSONPath . InputTransformer (dict) --Settings", "JSONPath . InputTransformer (dict) --Settings to enable you to provide custom input to", "different IAM role for each target. Input (string) --Valid JSON text passed to", "AWS API Documentation :example: response = client.test_event_pattern( EventPattern='string', Event='string' ) :type EventPattern: string", "minutes)'. :type EventPattern: string :param EventPattern: The event pattern. For more information, see", "the target is an Amazon Kinesis stream, you can optionally specify which shard", "For more information, see Amazon Kinesis Streams Key Concepts in the Amazon Kinesis", "DetailType (string) --Free-form string used to decide what fields to expect in the", "to create based on the TaskDefinition . The default is one. :rtype: dict", "Id (string) -- [REQUIRED]The ID of the target. Arn (string) -- [REQUIRED]The Amazon", "to rules. See also: AWS API Documentation :example: response = client.put_events( Entries=[ {", ":param NamePrefix: The prefix matching the rule name. :type NextToken: string :param NextToken:", "case, nothing from the event itself is passed to the target. 
You must", "pass def put_events(Entries=None): \"\"\" Sends custom events to Amazon CloudWatch Events so that", "The entry that defines an event in your system. You can specify several", "ECS tasks, AWS Step Functions state machines, and built-in targets. Note that creating", "optionally specify which shard the event goes to by using the KinesisParameters argument.", "(dict) --Information about the EC2 instances that are to be sent the command,", "{ 'EventId': 'string', 'ErrorCode': 'string', 'ErrorMessage': 'string' }, ] } \"\"\" pass def", "specific target in your account. See also: AWS API Documentation :example: response =", "] } \"\"\" pass def test_event_pattern(EventPattern=None, Event=None): \"\"\" Tests whether the specified event", "results to return. :rtype: dict :return: { 'RuleNames': [ 'string', ], 'NextToken': 'string'", "seconds the presigned url is valid for. By default it expires in an", "'ErrorCode': 'string', 'ErrorMessage': 'string' }, ] } :returns: Rule (string) -- [REQUIRED] The", "associated rule triggers, removed targets might continue to be invoked. Please allow a", "instance IDs. (string) -- EcsParameters (dict) --Contains the Amazon ECS task definition and", "--Represents an event to be submitted. Time (datetime) --The timestamp of the event,", "\"\"\" Tests whether the specified event pattern matches the provided event. Most services", "associated documentation files (the \"Software\"), to deal in the Software without restriction, including", "BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE", "method, and arguments :type ClientMethod: string :param ClientMethod: The client method to presign", "is observed. Rules with ScheduleExpressions self-trigger based on the given schedule. A rule", "rule is triggered. Target types include EC2 instances, AWS Lambda functions, Amazon Kinesis", "'EventId': 'string', 'ErrorCode': 'string', 'ErrorMessage': 'string' }, ] } \"\"\" pass def put_rule(Name=None,", "in the event you want to match. See also: AWS API Documentation :example:", "paginated, you can use the call client.get_paginator('create_foo'). \"\"\" pass def delete_rule(Name=None): \"\"\" Deletes", "also: AWS API Documentation :example: response = client.describe_rule( Name='string' ) :type Name: string", "'Name': 'string', 'Arn': 'string', 'EventPattern': 'string', 'ScheduleExpression': 'string', 'State': 'ENABLED'|'DISABLED', 'Description': 'string', 'RoleArn':", "Name (ARN) of the IAM role associated with the rule. :rtype: dict :return:", "machines, Run Command, and built-in targets. Id (string) -- [REQUIRED]The ID of the", "bracket notation. For more information, see The JavaScript Object Notation (JSON) Data Interchange", "notation. When you add targets to a rule and the associated rule triggers", "NamePrefix: string :param NamePrefix: The prefix matching the rule name. :type NextToken: string", "services in AWS treat : or / as the same character in Amazon", "You must use JSON dot notation, not bracket notation. For more information about", "relies on resource-based policies. For EC2 instances, Amazon Kinesis streams, and AWS Step", "assigned to the specified rule. See also: AWS API Documentation :example: response =", "event. Most services in AWS treat : or / as the same character", "is triggered. Target types include EC2 instances, AWS Lambda functions, Amazon Kinesis streams,", "AWS API Documentation :example: response = client.put_events( Entries=[ { 'Time': datetime(2015, 1, 1),", "certain event data. 
You can extract one or more key-value pairs from the", "key may specify multiple values. Key (string) -- [REQUIRED]Can be either tag: tag-key", "User Guide . :type Event: string :param Event: [REQUIRED] The event, in JSON", "(3600 seconds) :type HttpMethod: string :param HttpMethod: The http method to use on", "RunCommandTarget block can include only one key, but this key may specify multiple", "<reponame>gehad-shaat/pyboto3 ''' The MIT License (MIT) Copyright (c) 2016 WavyCloud Permission is hereby", "cluster. TaskCount (integer) --The number of tasks to create based on the TaskDefinition", "incoming events might continue to match to the deleted rule. Please allow a", "Events User Guide . When you specify Input , InputPath , or InputTransformer", "(string) -- EcsParameters (dict) --Contains the Amazon ECS task definition and task count", ":return: { 'Rules': [ { 'Name': 'string', 'Arn': 'string', 'EventPattern': 'string', 'State': 'ENABLED'|'DISABLED',", "target. You can see which of the rules in Amazon CloudWatch Events can", "Values is a list of tag values. If Key is InstanceIds , Values", "obtaining a copy of this software and associated documentation files (the \"Software\"), to", "also: AWS API Documentation :example: response = client.test_event_pattern( EventPattern='string', Event='string' ) :type EventPattern:", "as key-value pairs. Each RunCommandTarget block can include only one key, but this", "RoleArn='string' ) :type Name: string :param Name: [REQUIRED] The name of the rule", "characters when creating event patterns so that they match the ARN syntax in", "Events so that they can be matched to rules. See also: AWS API", "you can use the call client.get_paginator('create_foo'). \"\"\" pass def delete_rule(Name=None): \"\"\" Deletes the", "presign for :type Params: dict :param Params: The parameters normally passed to ClientMethod.", "call client.get_paginator('create_foo'). :rtype: L{botocore.paginate.Paginator} \"\"\" pass def get_waiter(): \"\"\" \"\"\" pass def list_rule_names_by_target(TargetArn=None,", "def put_targets(Rule=None, Targets=None): \"\"\" Adds the specified targets to the specified rule, or", "disable_rule(Name=None): \"\"\" Disables the specified rule. A disabled rule won't match any events,", "Resource Name (ARN) of the target. RoleArn (string) --The Amazon Resource Name (ARN)", "pairs, where each value is a JSON path. You must use JSON dot", "shard the event goes to by using the KinesisParameters argument. To invoke a", "to match to the rule names. See also: AWS API Documentation :example: response", "can either list all the rules or you can provide a prefix to", "Step Functions state machines, and built-in targets. Note that creating rules with built-in", "http method to use on the generated url. By default, the http method", "(dict) --Represents an event to be submitted. Time (datetime) --The timestamp of the", "EventPattern and a ScheduleExpression, in which case the rule triggers on matching events", "the next set of results. :type Limit: integer :param Limit: The maximum number", "of the rule. :type RoleArn: string :param RoleArn: The Amazon Resource Name (ARN)", "-- [REQUIRED]Input template where you can use the values of the keys from", "the rule triggers on matching events as well as on a schedule. Most", "target when the rule is triggered. If one rule triggers multiple targets, you", "of the target. RoleArn (string) --The Amazon Resource Name (ARN) of the IAM", "is InstanceIds , Values is a list of Amazon EC2 instance IDs. 
(string)", "including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,", "based on the TaskDefinition . The default is one. :rtype: dict :return: {", "self-trigger if it has a schedule expression. When you disable a rule, incoming", "or substantial portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT", "'string' }, 'InputTemplate': 'string' }, 'KinesisParameters': { 'PartitionKeyPath': 'string' }, 'RunCommandParameters': { 'RunCommandTargets':", "the KinesisParameters argument. To invoke a command on multiple EC2 instances with one", "Input , InputPath , or InputTransformer , you must use JSON dot notation,", "that are to be sent the command, specified as key-value pairs. Each RunCommandTarget", "of the rule. \"\"\" pass def enable_rule(Name=None): \"\"\" Enables the specified rule. If", "Amazon Resource Name (ARN) of the target. RoleArn (string) --The Amazon Resource Name", "new or updated rules. Please allow a short period of time for changes", "url. By default, the http method is whatever is used in the method's", "or add to the rule. (dict) --Targets are the resources to be invoked", "instances that are to be sent the command, specified as key-value pairs. Each", "Describes the specified rule. See also: AWS API Documentation :example: response = client.describe_rule(", "JSON dot notation, not bracket notation. (string) -- (string) -- InputTemplate (string) --", "the rule. :type RoleArn: string :param RoleArn: The Amazon Resource Name (ARN) of", "paginator for an operation. :type operation_name: string :param operation_name: The operation name. This", "if too many requests are made at the same time. If that happens,", "(string) --The source of the event. Resources (list) --AWS resources, identified by Amazon", "also: AWS API Documentation :example: response = client.list_rule_names_by_target( TargetArn='string', NextToken='string', Limit=123 ) :type", "is used. Source (string) --The source of the event. Resources (list) --AWS resources,", "an object containing fields, which may also contain nested subobjects. No constraints are", "the event target is an Amazon ECS cluster. TaskCount (integer) --The number of", "schedule expression. When you disable a rule, incoming events might continue to match", "use JSON dot notation, not bracket notation. For more information, see The JavaScript", "as on a schedule. Most services in AWS treat : or / as", "in AWS treat : or / as the same character in Amazon Resource", "Documentation :example: response = client.put_targets( Rule='string', Targets=[ { 'Id': 'string', 'Arn': 'string', 'RoleArn':", "InputPathsMap to customize the data sent to the target. KinesisParameters (dict) --The custom", "effect. A rule must contain at least an EventPattern or ScheduleExpression. Rules with", "resource. :type NextToken: string :param NextToken: The token returned by a previous call", "{ 'FailedEntryCount': 123, 'Entries': [ { 'EventId': 'string', 'ErrorCode': 'string', 'ErrorMessage': 'string' },", "at the same time. If that happens, FailedEntryCount is non-zero in the response", "(string) --Valid JSON text passed to the target. In this case, nothing from", "operation can be paginated, you can use the call client.get_paginator('create_foo'). \"\"\" pass def", "def list_rules(NamePrefix=None, NextToken=None, Limit=None): \"\"\" Lists your Amazon CloudWatch Events rules. 
You can", "COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN", "uses an exact match in event patterns and rules. Be sure to use", ":returns: (string) -- \"\"\" pass def list_rules(NamePrefix=None, NextToken=None, Limit=None): \"\"\" Lists your Amazon", "ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE", "Documentation :example: response = client.list_rule_names_by_target( TargetArn='string', NextToken='string', Limit=123 ) :type TargetArn: string :param", "or ScheduleExpression. Rules with EventPatterns are triggered when a matching event is observed.", "InstanceIds , Values is a list of Amazon EC2 instance IDs. (string) --", "'string' }, 'RunCommandParameters': { 'RunCommandTargets': [ { 'Key': 'string', 'Values': [ 'string', ]", "nested subobjects. No constraints are imposed on its contents. :rtype: dict :return: {", "previous call to retrieve the next set of results. :type Limit: integer :param", ":type RoleArn: string :param RoleArn: The Amazon Resource Name (ARN) of the IAM", "can see which of the rules in Amazon CloudWatch Events can invoke a", "name of the rule. \"\"\" pass def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None): \"\"\" Generate", "string used to decide what fields to expect in the event detail. Detail", "JSON dot notation, not bracket notation. For more information, see The JavaScript Object", "list :param Entries: [REQUIRED] The entry that defines an event in your system.", "string :param RoleArn: The Amazon Resource Name (ARN) of the IAM role associated", "is a list of Amazon EC2 instance IDs. (string) -- EcsParameters (dict) --Contains", "--Contains the Amazon ECS task definition and task count to be used, if", "you can optionally specify which shard the event goes to by using the", "the deleted rule. Please allow a short period of time for changes to", ":param State: Indicates whether the rule is enabled or disabled. :type Description: string", "Copyright (c) 2016 WavyCloud Permission is hereby granted, free of charge, to any", "Amazon ECS tasks, AWS Step Functions state machines, and built-in targets. Note that", "client.get_paginator('create_foo'). :rtype: L{botocore.paginate.Paginator} \"\"\" pass def get_waiter(): \"\"\" \"\"\" pass def list_rule_names_by_target(TargetArn=None, NextToken=None,", "to retrieve the next set of results. :type Limit: integer :param Limit: The", "on the generated url. By default, the http method is whatever is used", "}, ] }, 'EcsParameters': { 'TaskDefinitionArn': 'string', 'TaskCount': 123 } }, ], 'NextToken':", "EC2 Container Service Developer Guide . TaskDefinitionArn (string) -- [REQUIRED]The ARN of the", "is one. :rtype: dict :return: { 'FailedEntryCount': 123, 'FailedEntries': [ { 'TargetId': 'string',", "in the Amazon CloudWatch Events User Guide . :type Event: string :param Event:", "the following conditions: The above copyright notice and this permission notice shall be", "Limit: integer :param Limit: The maximum number of results to return. :rtype: dict", "Data Interchange Format . InputPath (string) --The value of the JSONPath that is", "response = client.remove_targets( Rule='string', Ids=[ 'string', ] ) :type Rule: string :param Rule:", "target, when the associated rule triggers, removed targets might continue to be invoked.", "of the failed target and the error code. See also: AWS API Documentation", "the task definition to use if the event target is an Amazon ECS", "may be present. 
(string) -- DetailType (string) --Free-form string used to decide what", "see Amazon Kinesis Streams Key Concepts in the Amazon Kinesis Streams Developer Guide", "event goes to by using the KinesisParameters argument. To invoke a command on", "seconds) :type HttpMethod: string :param HttpMethod: The http method to use on the", "rule triggers multiple targets, you can use a different IAM role for each", "to take effect. See also: AWS API Documentation :example: response = client.enable_rule( Name='string'", "event when passing it to the target. You must use JSON dot notation,", "machines, and built-in targets. Note that creating rules with built-in targets is supported", "on matching events as well as on a schedule. Most services in AWS", "Streams Developer Guide . RunCommandParameters (dict) --Parameters used when you are using the", "and built-in targets. Note that creating rules with built-in targets is supported only", "CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", "pass def get_waiter(): \"\"\" \"\"\" pass def list_rule_names_by_target(TargetArn=None, NextToken=None, Limit=None): \"\"\" Lists the", "rule. Targets (list) -- [REQUIRED] The targets to update or add to the", "included in all copies or substantial portions of the Software. THE SOFTWARE IS", "}, ] }, 'EcsParameters': { 'TaskDefinitionArn': 'string', 'TaskCount': 123 } }, ] )", "to expect in the event detail. Detail (string) --In the JSON sense, an", "], 'NextToken': 'string' } :returns: (string) -- (string) -- \"\"\" pass def put_events(Entries=None):", "Rule='string', Targets=[ { 'Id': 'string', 'Arn': 'string', 'RoleArn': 'string', 'Input': 'string', 'InputPath': 'string',", "account. See also: AWS API Documentation :example: response = client.list_rule_names_by_target( TargetArn='string', NextToken='string', Limit=123", ":example: response = client.put_events( Entries=[ { 'Time': datetime(2015, 1, 1), 'Source': 'string', 'Resources':", "triggers on matching events as well as on a schedule. Most services in", "rule. \"\"\" pass def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None): \"\"\" Generate a presigned url", "input to the target. InputPathsMap (dict) --Map of JSON paths to be extracted", "can delete the rule. When you delete a rule, incoming events might continue", "want to match. See also: AWS API Documentation :example: response = client.put_rule( Name='string',", "data. You can extract one or more key-value pairs from the event and", "path to be extracted from the event and used as the partition key.", "Indicates whether the rule is enabled or disabled. :type Description: string :param Description:", "used for extracting part of the matched event when passing it to the", "matches the provided event. Most services in AWS treat : or / as", ") :type TargetArn: string :param TargetArn: [REQUIRED] The Amazon Resource Name (ARN) of", "use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,", "the rule. Targets (list) -- [REQUIRED] The targets to update or add to", "for :type Params: dict :param Params: The parameters normally passed to ClientMethod. 
:type", "streams, Amazon ECS tasks, AWS Step Functions state machines, Run Command, and built-in", "} \"\"\" pass def test_event_pattern(EventPattern=None, Event=None): \"\"\" Tests whether the specified event pattern", "'string', ], 'NextToken': 'string' } :returns: (string) -- \"\"\" pass def list_rules(NamePrefix=None, NextToken=None,", "ECS tasks, see Task Definitions in the Amazon EC2 Container Service Developer Guide", "can partially fail if too many requests are made at the same time.", "and you'd normally invoke the operation as client.create_foo(**kwargs), if the create_foo operation can", "rule. See also: AWS API Documentation :example: response = client.list_targets_by_rule( Rule='string', NextToken='string', Limit=123", "the rules for the specified target. You can see which of the rules", "NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR", "the generated url. By default, the http method is whatever is used in", "if an operation can be paginated. :type operation_name: string :param operation_name: The operation", "Rule: string :param Rule: [REQUIRED] The name of the rule. :type Ids: list", "The targets to update or add to the rule. (dict) --Targets are the", "response and each entry in FailedEntries provides the ID of the failed target", "when the target is an Amazon Kinesis stream. If you do not include", "RemoveTargets before you can delete the rule. When you delete a rule, incoming", "call to retrieve the next set of results. :type Limit: integer :param Limit:", "TargetArn: [REQUIRED] The Amazon Resource Name (ARN) of the target resource. :type NextToken:", "Amazon Kinesis streams, and AWS Step Functions state machines, CloudWatch Events relies on", "method name on the client. For example, if the method name is create_foo,", "be sent the command, specified as key-value pairs. Each RunCommandTarget block can include", "-- [REQUIRED] The targets to update or add to the rule. (dict) --Targets", "the TaskDefinition . The default is one. \"\"\" pass def remove_targets(Rule=None, Ids=None): \"\"\"", "and Event Patterns in the Amazon CloudWatch Events User Guide . :type Event:", "Entries: list :param Entries: [REQUIRED] The entry that defines an event in your", "event. Resources (list) --AWS resources, identified by Amazon Resource Name (ARN), which the", "to control shard assignment, when the target is an Amazon Kinesis stream. If", "the state. You can disable a rule using DisableRule . When you create", "'string', 'State': 'ENABLED'|'DISABLED', 'Description': 'string', 'ScheduleExpression': 'string', 'RoleArn': 'string' }, ], 'NextToken': 'string'", "TaskDefinitionArn (string) -- [REQUIRED]The ARN of the task definition to use if the", "same name as the method name on the client. For example, if the", "\"Software\"), to deal in the Software without restriction, including without limitation the rights", "deal in the Software without restriction, including without limitation the rights to use,", "including zero, may be present. (string) -- DetailType (string) --Free-form string used to", "API Documentation :example: response = client.list_rules( NamePrefix='string', NextToken='string', Limit=123 ) :type NamePrefix: string", "a schedule. Most services in AWS treat : or / as the same", "the Amazon CloudWatch Events User Guide . When you specify Input , InputPath", "the source and type of the event, resources associated with the event, and", "one. 
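Illustrative sketch (not part of the generated stub): targeting an ECS task via EcsParameters and a Kinesis stream with an explicit partition-key path, as documented above. Every ARN, the cluster, and the roles are placeholders; the IAM roles are required because CloudWatch Events assumes them to invoke these target types.

import boto3

client = boto3.client('events')

client.put_targets(
    Rule='order-events',   # placeholder rule name
    Targets=[
        {
            # run one ECS task on the given cluster when the rule fires
            'Id': 'run-ecs-task',
            'Arn': 'arn:aws:ecs:us-east-1:123456789012:cluster/default',                  # cluster ARN (placeholder)
            'RoleArn': 'arn:aws:iam::123456789012:role/ecsEventsRole',                    # placeholder role
            'EcsParameters': {
                'TaskDefinitionArn': 'arn:aws:ecs:us-east-1:123456789012:task-definition/worker:3',
                'TaskCount': 1,
            },
        },
        {
            # write the matched event to a Kinesis stream, sharded by a field of the event
            'Id': 'to-kinesis',
            'Arn': 'arn:aws:kinesis:us-east-1:123456789012:stream/order-stream',          # placeholder stream ARN
            'RoleArn': 'arn:aws:iam::123456789012:role/eventsToKinesisRole',              # placeholder role
            'KinesisParameters': {'PartitionKeyPath': '$.detail.orderId'},
        },
    ]
)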
\"\"\" pass def remove_targets(Rule=None, Ids=None): \"\"\" Removes the specified targets from the", "Documentation :example: response = client.describe_rule( Name='string' ) :type Name: string :param Name: [REQUIRED]", "AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE", "rules or you can provide a prefix to match to the rule names.", "in an hour (3600 seconds) :type HttpMethod: string :param HttpMethod: The http method", ", or InputTransformer , you must use JSON dot notation, not bracket notation.", "client.enable_rule( Name='string' ) :type Name: string :param Name: [REQUIRED] The name of the", "'ErrorMessage': 'string' }, ] } \"\"\" pass def put_rule(Name=None, ScheduleExpression=None, EventPattern=None, State=None, Description=None,", "resource-based policies. For EC2 instances, Amazon Kinesis streams, and AWS Step Functions state", "can be paginated, you can use the call client.get_paginator('create_foo'). :rtype: L{botocore.paginate.Paginator} \"\"\" pass", "own, Amazon CloudWatch Events needs the appropriate permissions. For AWS Lambda and Amazon", "Lambda functions, Amazon Kinesis streams, Amazon ECS tasks, AWS Step Functions state machines,", "-- [REQUIRED]The Amazon Resource Name (ARN) of the target. RoleArn (string) --The Amazon", "NextToken=None, Limit=None): \"\"\" Lists your Amazon CloudWatch Events rules. You can either list", "\"\"\" pass def disable_rule(Name=None): \"\"\" Disables the specified rule. A disabled rule won't", "} \"\"\" pass def disable_rule(Name=None): \"\"\" Disables the specified rule. A disabled rule", "ECS cluster. TaskCount (integer) --The number of tasks to create based on the", "you to provide custom input to a target based on certain event data.", "When you add targets to a rule and the associated rule triggers soon", "the event, and so on. (dict) --Represents an event to be submitted. Time", "rule is enabled or disabled. :type Description: string :param Description: A description of", "}, ] ) :type Rule: string :param Rule: [REQUIRED] The name of the", "customized input to the target. InputPathsMap (dict) --Map of JSON paths to be", "the rules or you can provide a prefix to match to the rule", "values. If Key is InstanceIds , Values is a list of Amazon EC2", "Amazon Kinesis streams, Amazon ECS tasks, AWS Step Functions state machines, Run Command,", "the event and then use that data to send customized input to the", "def get_paginator(operation_name=None): \"\"\" Create a paginator for an operation. :type operation_name: string :param", "} \"\"\" pass def put_targets(Rule=None, Targets=None): \"\"\" Adds the specified targets to the", "} :returns: (string) -- \"\"\" pass def list_rules(NamePrefix=None, NextToken=None, Limit=None): \"\"\" Lists your", "Event Patterns in the Amazon CloudWatch Events User Guide . :type State: string", "default, or based on value of the state. You can disable a rule", "so, subject to the following conditions: The above copyright notice and this permission", ":example: response = client.remove_targets( Rule='string', Ids=[ 'string', ] ) :type Rule: string :param", "Targets: [REQUIRED] The targets to update or add to the rule. (dict) --Targets", "dict :return: { 'Name': 'string', 'Arn': 'string', 'EventPattern': 'string', 'ScheduleExpression': 'string', 'State': 'ENABLED'|'DISABLED',", "Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero,", "and the error code. 
See also: AWS API Documentation :example: response = client.remove_targets(", "123 } }, ] ) :type Rule: string :param Rule: [REQUIRED] The name", "client.list_rule_names_by_target( TargetArn='string', NextToken='string', Limit=123 ) :type TargetArn: string :param TargetArn: [REQUIRED] The Amazon", "rule. (dict) --Targets are the resources to be invoked when a rule is", ":param EventPattern: The event pattern. For more information, see Events and Event Patterns", "this target when the rule is triggered. If one rule triggers multiple targets,", "results to return. :rtype: dict :return: { 'Targets': [ { 'Id': 'string', 'Arn':", "values. Key (string) -- [REQUIRED]Can be either tag: tag-key or InstanceIds . Values", "Interchange Format . InputPath (string) --The value of the JSONPath that is used", ":example: response = client.disable_rule( Name='string' ) :type Name: string :param Name: [REQUIRED] The", "the rule is triggered. If one rule triggers multiple targets, you can use", "client.create_foo(**kwargs), if the create_foo operation can be paginated, you can use the call", "[ { 'TargetId': 'string', 'ErrorCode': 'string', 'ErrorMessage': 'string' }, ] } :returns: Rule", "when passing it to the target. You must use JSON dot notation, not", "'InputPath': 'string', 'InputTransformer': { 'InputPathsMap': { 'string': 'string' }, 'InputTemplate': 'string' }, 'KinesisParameters':", "control shard assignment, when the target is an Amazon Kinesis stream. If you", "Amazon Kinesis streams, Amazon ECS tasks, AWS Step Functions state machines, and built-in", "-- InputTemplate (string) -- [REQUIRED]Input template where you can use the values of", "When the rule is triggered, those targets are no longer be invoked. When", "? *)', 'rate(5 minutes)'. :type EventPattern: string :param EventPattern: The event pattern. For", "constraints are imposed on its contents. :rtype: dict :return: { 'FailedEntryCount': 123, 'Entries':", "object containing fields, which may also contain nested subobjects. No constraints are imposed", ":type Rule: string :param Rule: [REQUIRED] The name of the rule. :type NextToken:", "more information, see Authentication and Access Control in the Amazon CloudWatch Events User", "the targets to remove from the rule. (string) -- :rtype: dict :return: {", "to be invoked. Please allow a short period of time for changes to", ":param Rule: [REQUIRED] The name of the rule. :type Ids: list :param Ids:", "also: AWS API Documentation :example: response = client.put_events( Entries=[ { 'Time': datetime(2015, 1,", "The parameters normally passed to ClientMethod. :type ExpiresIn: int :param ExpiresIn: The number", "'Rules': [ { 'Name': 'string', 'Arn': 'string', 'EventPattern': 'string', 'State': 'ENABLED'|'DISABLED', 'Description': 'string',", "the matched event when passing it to the target. You must use JSON", "value is a JSON path. You must use JSON dot notation, not bracket", "as client.create_foo(**kwargs), if the create_foo operation can be paginated, you can use the", "error code. See also: AWS API Documentation :example: response = client.remove_targets( Rule='string', Ids=[", "'Arn': 'string', 'RoleArn': 'string', 'Input': 'string', 'InputPath': 'string', 'InputTransformer': { 'InputPathsMap': { 'string':", "notation, not bracket notation. When you add targets to a rule and the", "streams, Amazon ECS tasks, AWS Step Functions state machines, and built-in targets. Note", "(string) -- [REQUIRED]The Amazon Resource Name (ARN) of the target. 
RoleArn (string) --The", "The MIT License (MIT) Copyright (c) 2016 WavyCloud Permission is hereby granted, free", "conditions: The above copyright notice and this permission notice shall be included in", "normally passed to ClientMethod. :type ExpiresIn: int :param ExpiresIn: The number of seconds", "Amazon CloudWatch Events User Guide . :type Event: string :param Event: [REQUIRED] The", "THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO", "Targets are the resources that are invoked when a rule is triggered. Example", "its method, and arguments :type ClientMethod: string :param ClientMethod: The client method to", "are the resources that are invoked when a rule is triggered. Example targets", "pattern matches the provided event. Most services in AWS treat : or /", "from the event itself is passed to the target. You must use JSON", "1), 'Source': 'string', 'Resources': [ 'string', ], 'DetailType': 'string', 'Detail': 'string' }, ]", "(string) -- [REQUIRED]The ARN of the task definition to use if the event", "if they are already associated with the rule. Targets are the resources that", "] }, 'EcsParameters': { 'TaskDefinitionArn': 'string', 'TaskCount': 123 } }, ] ) :type", "Key (string) -- [REQUIRED]Can be either tag: tag-key or InstanceIds . Values (list)", "functions, Amazon Kinesis streams, Amazon ECS tasks, AWS Step Functions state machines, Run", "Source (string) --The source of the event. Resources (list) --AWS resources, identified by", "Events User Guide . :type State: string :param State: Indicates whether the rule", "Documentation :example: response = client.list_targets_by_rule( Rule='string', NextToken='string', Limit=123 ) :type Rule: string :param", "subobjects. No constraints are imposed on its contents. :rtype: dict :return: { 'FailedEntryCount':", "a rule using RemoveTargets before you can delete the rule. When you delete", "Kinesis streams, Amazon ECS tasks, AWS Step Functions state machines, and built-in targets.", "a JSON path. You must use JSON dot notation, not bracket notation. (string)", "part of the matched event when passing it to the target. You must", "specify in the RoleARN argument in PutTarget . For more information, see Authentication", "FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR", "Params: The parameters normally passed to ClientMethod. :type ExpiresIn: int :param ExpiresIn: The", "string :param EventPattern: The event pattern. For more information, see Events and Event", "'string', 'Resources': [ 'string', ], 'DetailType': 'string', 'Detail': 'string' }, ] ) :type", "rule, or updates the targets if they are already associated with the rule.", "'string' }, ] } \"\"\" pass def test_event_pattern(EventPattern=None, Event=None): \"\"\" Tests whether the", "triggered when a matching event is observed. Rules with ScheduleExpressions self-trigger based on", "event primarily concerns. Any number, including zero, may be present. (string) -- DetailType", "Amazon EC2 Container Service Developer Guide . TaskDefinitionArn (string) -- [REQUIRED]The ARN of", "See also: AWS API Documentation :example: response = client.enable_rule( Name='string' ) :type Name:", "--Map of JSON paths to be extracted from the event. These are key-value", "failed target and the error code. See also: AWS API Documentation :example: response", "AWS API Documentation :example: response = client.disable_rule( Name='string' ) :type Name: string :param", "might continue to be invoked. 
Please allow a short period of time for", "can use to control shard assignment, when the target is an Amazon Kinesis", ":param Name: [REQUIRED] The name of the rule. \"\"\" pass def generate_presigned_url(ClientMethod=None, Params=None,", "FailedEntries provides the ID of the failed target and the error code. See", "EC2 Run Command. RunCommandTargets (list) -- [REQUIRED]Currently, we support including only one RunCommandTarget", "in your account. See also: AWS API Documentation :example: response = client.list_rule_names_by_target( TargetArn='string',", "also: AWS API Documentation :example: response = client.put_targets( Rule='string', Targets=[ { 'Id': 'string',", "Rules with EventPatterns are triggered when a matching event is observed. Rules with", "a rule, incoming events might not immediately start matching to a newly enabled", "one RunCommandTarget block, which specifies either an array of InstanceIds or a tag.", "--The timestamp of the event, per RFC3339 . If no timestamp is provided,", "treat : or / as the same character in Amazon Resource Names (ARNs).", "WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. '''", "to presign for :type Params: dict :param Params: The parameters normally passed to", "AWS API Documentation :example: response = client.enable_rule( Name='string' ) :type Name: string :param", "'Key': 'string', 'Values': [ 'string', ] }, ] }, 'EcsParameters': { 'TaskDefinitionArn': 'string',", "as well as on a schedule. Most services in AWS treat : or", "CloudWatch Events relies on resource-based policies. For EC2 instances, Amazon Kinesis streams, and", "'ErrorCode': 'string', 'ErrorMessage': 'string' }, ] } \"\"\" pass def put_rule(Name=None, ScheduleExpression=None, EventPattern=None,", "for changes to take effect. This action can partially fail if too many", "name is create_foo, and you'd normally invoke the operation as client.create_foo(**kwargs), if the", "Resource Names (ARNs). However, CloudWatch Events uses an exact match in event patterns", "parameter you can use to control shard assignment, when the target is an", "rule using DisableRule . When you create or update a rule, incoming events", ":param Rule: [REQUIRED] The name of the rule. :type Targets: list :param Targets:", ":param RoleArn: The Amazon Resource Name (ARN) of the IAM role associated with", "LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.", "{ 'Rules': [ { 'Name': 'string', 'Arn': 'string', 'EventPattern': 'string', 'State': 'ENABLED'|'DISABLED', 'Description':", "target-specific parameters. If the target is an Amazon Kinesis stream, you can optionally", "PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT", "specify which shard the event goes to by using the KinesisParameters argument. To", "of the target resource. :type NextToken: string :param NextToken: The token returned by", "the Software is furnished to do so, subject to the following conditions: The", "IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING", "changes to take effect. See also: AWS API Documentation :example: response = client.delete_rule(", "A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT", "of the JSONPath that is used for extracting part of the matched event", "Streams Key Concepts in the Amazon Kinesis Streams Developer Guide . 
RunCommandParameters (dict)", "PartitionKeyPath (string) -- [REQUIRED]The JSON path to be extracted from the event and", "If that happens, FailedEntryCount is non-zero in the response and each entry in", "Access Control in the Amazon CloudWatch Events User Guide . When you specify", "enable_rule(Name=None): \"\"\" Enables the specified rule. If the rule does not exist, the", "(string) -- \"\"\" pass def put_events(Entries=None): \"\"\" Sends custom events to Amazon CloudWatch", "operation can be paginated, you can use the call client.get_paginator('create_foo'). :rtype: L{botocore.paginate.Paginator} \"\"\"", "The maximum number of results to return. :rtype: dict :return: { 'RuleNames': [", "Amazon ECS task. For more information about Amazon ECS tasks, see Task Definitions", "a target, when the associated rule triggers, removed targets might continue to be", "(string) -- :rtype: dict :return: { 'FailedEntryCount': 123, 'FailedEntries': [ { 'TargetId': 'string',", ":type NextToken: string :param NextToken: The token returned by a previous call to", "the method name on the client. For example, if the method name is", "[REQUIRED] The entry that defines an event in your system. You can specify", "THE SOFTWARE. ''' def can_paginate(operation_name=None): \"\"\" Check if an operation can be paginated.", "time. If that happens, FailedEntryCount is non-zero in the response and each entry", "the command, specified as key-value pairs. Each RunCommandTarget block can include only one", "each target. Input (string) --Valid JSON text passed to the target. In this", "code. See also: AWS API Documentation :example: response = client.remove_targets( Rule='string', Ids=[ 'string',", "operation. :type operation_name: string :param operation_name: The operation name. This is the same", "pairs. Each RunCommandTarget block can include only one key, but this key may", "(list) -- [REQUIRED]Currently, we support including only one RunCommandTarget block, which specifies either", "} }, ], 'NextToken': 'string' } :returns: (string) -- (string) -- \"\"\" pass", "customize the data sent to the target. KinesisParameters (dict) --The custom parameter you", "number of results to return. :rtype: dict :return: { 'RuleNames': [ 'string', ],", "name as the method name on the client. For example, if the method", "rule. :type NextToken: string :param NextToken: The token returned by a previous call", "used in the method's model. \"\"\" pass def get_paginator(operation_name=None): \"\"\" Create a paginator", "on certain event data. You can extract one or more key-value pairs from", "to new or updated rules. Please allow a short period of time for", "on a schedule. Most services in AWS treat : or / as the", "{ 'RuleArn': 'string' } \"\"\" pass def put_targets(Rule=None, Targets=None): \"\"\" Adds the specified", "InputTemplate (string) -- [REQUIRED]Input template where you can use the values of the", "the rule. \"\"\" pass def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None): \"\"\" Generate a presigned", "'RuleNames': [ 'string', ], 'NextToken': 'string' } :returns: (string) -- \"\"\" pass def", "a presigned url given a client, its method, and arguments :type ClientMethod: string", "Rule: string :param Rule: [REQUIRED] The name of the rule. :type Targets: list", "[REQUIRED]The JSON path to be extracted from the event and used as the", "shard assignment, when the target is an Amazon Kinesis stream. 
If you do", "you do not include this parameter, the default is to use the eventId", "Amazon EC2 instance IDs. (string) -- EcsParameters (dict) --Contains the Amazon ECS task", "and rules. Be sure to use the correct ARN characters when creating event", "-- (string) -- InputTemplate (string) -- [REQUIRED]Input template where you can use the", "OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR", "role associated with the rule. :rtype: dict :return: { 'RuleArn': 'string' } \"\"\"", "template where you can use the values of the keys from InputPathsMap to", "to the following conditions: The above copyright notice and this permission notice shall", "newly enabled rule. Please allow a short period of time for changes to", "'PartitionKeyPath': 'string' }, 'RunCommandParameters': { 'RunCommandTargets': [ { 'Key': 'string', 'Values': [ 'string',", "associated with the rule. Targets are the resources that are invoked when a", "Software, and to permit persons to whom the Software is furnished to do", "targets. Id (string) -- [REQUIRED]The ID of the target. Arn (string) -- [REQUIRED]The", "] } \"\"\" pass def put_rule(Name=None, ScheduleExpression=None, EventPattern=None, State=None, Description=None, RoleArn=None): \"\"\" Creates", "in the event detail. Detail (string) --In the JSON sense, an object containing", "number of seconds the presigned url is valid for. By default it expires", "'string', 'ErrorMessage': 'string' }, ] } :returns: Rule (string) -- [REQUIRED] The name", "AWS Lambda functions, Amazon Kinesis streams, Amazon ECS tasks, AWS Step Functions state", ":type EventPattern: string :param EventPattern: The event pattern. For more information, see Events", "Documentation :example: response = client.put_events( Entries=[ { 'Time': datetime(2015, 1, 1), 'Source': 'string',", "rule. \"\"\" pass def describe_rule(Name=None): \"\"\" Describes the specified rule. See also: AWS", "'FailedEntryCount': 123, 'Entries': [ { 'EventId': 'string', 'ErrorCode': 'string', 'ErrorMessage': 'string' }, ]", "client.describe_rule( Name='string' ) :type Name: string :param Name: [REQUIRED] The name of the", "also: AWS API Documentation :example: response = client.remove_targets( Rule='string', Ids=[ 'string', ] )", "Kinesis streams, and AWS Step Functions state machines, CloudWatch Events relies on IAM", "client.delete_rule( Name='string' ) :type Name: string :param Name: [REQUIRED] The name of the", "Params: dict :param Params: The parameters normally passed to ClientMethod. :type ExpiresIn: int", "needs the appropriate permissions. For AWS Lambda and Amazon SNS resources, CloudWatch Events", "call is used. Source (string) --The source of the event. Resources (list) --AWS", "MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE", "the operation fails. When you enable a rule, incoming events might not immediately", ":param NextToken: The token returned by a previous call to retrieve the next", "given schedule. A rule can have both an EventPattern and a ScheduleExpression, in", "provide a prefix to match to the rule names. See also: AWS API", "has a schedule expression. When you disable a rule, incoming events might continue", "IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED", "The name of the rule. 
:type NextToken: string :param NextToken: The token returned", "dict :return: { 'FailedEntryCount': 123, 'FailedEntries': [ { 'TargetId': 'string', 'ErrorCode': 'string', 'ErrorMessage':", "only one key, but this key may specify multiple values. Key (string) --", "arguments :type ClientMethod: string :param ClientMethod: The client method to presign for :type", "method name is create_foo, and you'd normally invoke the operation as client.create_foo(**kwargs), if", ":param ClientMethod: The client method to presign for :type Params: dict :param Params:", ":type Name: string :param Name: [REQUIRED] The name of the rule. :rtype: dict", "the target. RoleArn (string) --The Amazon Resource Name (ARN) of the IAM role", "Please allow a short period of time for changes to take effect. This", "response = client.list_targets_by_rule( Rule='string', NextToken='string', Limit=123 ) :type Rule: string :param Rule: [REQUIRED]", "\"\"\" \"\"\" pass def list_rule_names_by_target(TargetArn=None, NextToken=None, Limit=None): \"\"\" Lists the rules for the", "If the target is an Amazon Kinesis stream, you can optionally specify which", "Event='string' ) :type EventPattern: string :param EventPattern: [REQUIRED] The event pattern. For more", "or InstanceIds . Values (list) -- [REQUIRED]If Key is tag: tag-key , Values", "[REQUIRED] The event, in JSON format, to test against the event pattern. :rtype:", "Values (list) -- [REQUIRED]If Key is tag: tag-key , Values is a list", "Functions state machines, Run Command, and built-in targets. Id (string) -- [REQUIRED]The ID", "def list_targets_by_rule(Rule=None, NextToken=None, Limit=None): \"\"\" Lists the targets assigned to the specified rule.", "client.remove_targets( Rule='string', Ids=[ 'string', ] ) :type Rule: string :param Rule: [REQUIRED] The", "can include only one key, but this key may specify multiple values. Key", "results. :type Limit: integer :param Limit: The maximum number of results to return.", "rule. :rtype: dict :return: { 'RuleArn': 'string' } \"\"\" pass def put_targets(Rule=None, Targets=None):", "to enable you to provide custom input to a target based on certain", "when the rule is triggered. If one rule triggers multiple targets, you can", "CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR", "of JSON paths to be extracted from the event. These are key-value pairs,", "\"\"\" Lists your Amazon CloudWatch Events rules. You can either list all the", "person obtaining a copy of this software and associated documentation files (the \"Software\"),", "your account. See also: AWS API Documentation :example: response = client.list_rule_names_by_target( TargetArn='string', NextToken='string',", "to be sent the command, specified as key-value pairs. Each RunCommandTarget block can", "'EventPattern': 'string', 'ScheduleExpression': 'string', 'State': 'ENABLED'|'DISABLED', 'Description': 'string', 'RoleArn': 'string' } \"\"\" pass", "TaskDefinition . The default is one. \"\"\" pass def remove_targets(Rule=None, Ids=None): \"\"\" Removes", "--In the JSON sense, an object containing fields, which may also contain nested", "list of Amazon EC2 instance IDs. (string) -- EcsParameters (dict) --Contains the Amazon", "rule. When the rule is triggered, those targets are no longer be invoked.", "Sends custom events to Amazon CloudWatch Events so that they can be matched", "support including only one RunCommandTarget block, which specifies either an array of InstanceIds", "effect. 
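For illustration, a minimal usage sketch (the boto3 client construction and the rule name 'my-rule' are assumptions for this sketch, not part of the official example that follows):
import boto3
events = boto3.client('events')
events.enable_rule(Name='my-rule')  # fails if the rule does not exist
# State should read 'ENABLED' once the change has had time to take effect
print(events.describe_rule(Name='my-rule')['State'])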
See also: AWS API Documentation :example: response = client.enable_rule( Name='string' ) :type", "targets is supported only in the AWS Management Console. For some target types,", "you can provide a prefix to match to the rule names. See also:", "zero, may be present. (string) -- DetailType (string) --Free-form string used to decide", "expect in the event detail. Detail (string) --In the JSON sense, an object", "notation. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format", "in the Amazon CloudWatch Events User Guide . When you specify Input ,", "rule. :rtype: dict :return: { 'Name': 'string', 'Arn': 'string', 'EventPattern': 'string', 'ScheduleExpression': 'string',", "your system. You can specify several parameters for the entry such as the", "might not immediately start matching to new or updated rules. Please allow a", "the IAM role associated with the rule. :rtype: dict :return: { 'RuleArn': 'string'", "tag-key or InstanceIds . Values (list) -- [REQUIRED]If Key is tag: tag-key ,", "in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED", "TaskDefinition . The default is one. :rtype: dict :return: { 'FailedEntryCount': 123, 'FailedEntries':", "def enable_rule(Name=None): \"\"\" Enables the specified rule. If the rule does not exist,", "Event: string :param Event: [REQUIRED] The event, in JSON format, to test against", "modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to", "'string', 'RoleArn': 'string' } \"\"\" pass def disable_rule(Name=None): \"\"\" Disables the specified rule.", "fields to expect in the event detail. Detail (string) --In the JSON sense,", "retrieve the next set of results. :type Limit: integer :param Limit: The maximum", "in the RoleARN argument in PutTarget . For more information, see Authentication and", "'Input': 'string', 'InputPath': 'string', 'InputTransformer': { 'InputPathsMap': { 'string': 'string' }, 'InputTemplate': 'string'", "as the partition key. PartitionKeyPath (string) -- [REQUIRED]The JSON path to be extracted", "client. For example, if the method name is create_foo, and you'd normally invoke", "\"\"\" Adds the specified targets to the specified rule, or updates the targets", "specified rule. If the rule does not exist, the operation fails. When you", "This action can partially fail if too many requests are made at the", "'string': 'string' }, 'InputTemplate': 'string' }, 'KinesisParameters': { 'PartitionKeyPath': 'string' }, 'RunCommandParameters': {", ":param Description: A description of the rule. :type RoleArn: string :param RoleArn: The", "'FailedEntries': [ { 'TargetId': 'string', 'ErrorCode': 'string', 'ErrorMessage': 'string' }, ] } :returns:", "client, its method, and arguments :type ClientMethod: string :param ClientMethod: The client method", "name of the rule. :type Targets: list :param Targets: [REQUIRED] The targets to", "more information, see Events and Event Patterns in the Amazon CloudWatch Events User", "Events uses an exact match in event patterns and rules. Be sure to", "type of the event, resources associated with the event, and so on. (dict)", "send customized input to the target. InputPathsMap (dict) --Map of JSON paths to", "\"\"\" Lists the rules for the specified target. You can see which of", "enabled rule. Please allow a short period of time for changes to take", "Description: string :param Description: A description of the rule. :type RoleArn: string :param", "* * ? *)', 'rate(5 minutes)'. 
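As a hedged sketch of the schedule expression formats above (the rule name and description are illustrative; a boto3 'events' client is assumed):
import boto3
events = boto3.client('events')
# A self-triggering rule that fires every 5 minutes; 'cron(0 20 * * ? *)' would instead fire daily at 20:00 UTC
resp = events.put_rule(
    Name='heartbeat-every-5-minutes',
    ScheduleExpression='rate(5 minutes)',
    State='ENABLED',
    Description='Illustrative scheduled rule',
)
print(resp['RuleArn'])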
:type EventPattern: string :param EventPattern: The event", "which of the rules in Amazon CloudWatch Events can invoke a specific target", "Amazon ECS tasks, see Task Definitions in the Amazon EC2 Container Service Developer", "a tag. (dict) --Information about the EC2 instances that are to be sent", "both an EventPattern and a ScheduleExpression, in which case the rule triggers on", "short period of time for changes to take effect. This action can partially", "specified target. You can see which of the rules in Amazon CloudWatch Events", "to take effect. This action can partially fail if too many requests are", "list all the rules or you can provide a prefix to match to", "with EventPatterns are triggered when a matching event is observed. Rules with ScheduleExpressions", "set of results. :type Limit: integer :param Limit: The maximum number of results", "information, see Events and Event Patterns in the Amazon CloudWatch Events User Guide", "= client.put_rule( Name='string', ScheduleExpression='string', EventPattern='string', State='ENABLED'|'DISABLED', Description='string', RoleArn='string' ) :type Name: string :param", "RFC3339 . If no timestamp is provided, the timestamp of the PutEvents call", "are no longer be invoked. When you remove a target, when the associated", "'string', 'Input': 'string', 'InputPath': 'string', 'InputTransformer': { 'InputPathsMap': { 'string': 'string' }, 'InputTemplate':", "ARN characters when creating event patterns so that they match the ARN syntax", "the response and each entry in FailedEntries provides the ID of the failed", "be extracted from the event. These are key-value pairs, where each value is", "tag: tag-key , Values is a list of tag values. If Key is", "rule that you are creating or updating. :type ScheduleExpression: string :param ScheduleExpression: The", "The Amazon Resource Name (ARN) of the target resource. :type NextToken: string :param", "to Amazon CloudWatch Events so that they can be matched to rules. See", ":type Name: string :param Name: [REQUIRED] The name of the rule that you", "Name: string :param Name: [REQUIRED] The name of the rule that you are", "custom parameter you can use to control shard assignment, when the target is", "Target types include EC2 instances, AWS Lambda functions, Amazon Kinesis streams, Amazon ECS", "InputTransformer , you must use JSON dot notation, not bracket notation. When you", "to use the eventId as the partition key. PartitionKeyPath (string) -- [REQUIRED]The JSON", "Lambda and Amazon SNS resources, CloudWatch Events relies on resource-based policies. For EC2", "parameters for the entry such as the source and type of the event,", "WavyCloud Permission is hereby granted, free of charge, to any person obtaining a", "to a rule and the associated rule triggers soon after, new or updated", "expires in an hour (3600 seconds) :type HttpMethod: string :param HttpMethod: The http", "you are using the rule to invoke Amazon EC2 Run Command. RunCommandTargets (list)", "[REQUIRED]The ID of the target. Arn (string) -- [REQUIRED]The Amazon Resource Name (ARN)", "the method name is create_foo, and you'd normally invoke the operation as client.create_foo(**kwargs),", "'RuleArn': 'string' } \"\"\" pass def put_targets(Rule=None, Targets=None): \"\"\" Adds the specified targets", "\"\"\" Disables the specified rule. A disabled rule won't match any events, and", "used for this target when the rule is triggered. If one rule triggers", "in the Amazon EC2 Container Service Developer Guide . 
TaskDefinitionArn (string) -- [REQUIRED]The", "when a rule is triggered. Target types include EC2 instances, AWS Lambda functions,", "must use JSON dot notation, not bracket notation. For more information about JSON", "rule, incoming events might not immediately start matching to new or updated rules.", "create based on the TaskDefinition . The default is one. :rtype: dict :return:", "and task count to be used, if the event target is an Amazon", "observed. Rules with ScheduleExpressions self-trigger based on the given schedule. A rule can", "target. You must use JSON dot notation, not bracket notation. For more information", "not immediately start matching to new or updated rules. Please allow a short", "A disabled rule won't match any events, and won't self-trigger if it has", "roles that you specify in the RoleARN argument in PutTarget . For more", "to any person obtaining a copy of this software and associated documentation files", ":param HttpMethod: The http method to use on the generated url. By default,", "on IAM roles that you specify in the RoleARN argument in PutTarget .", "response = client.disable_rule( Name='string' ) :type Name: string :param Name: [REQUIRED] The name", "with the rule. Targets are the resources that are invoked when a rule", "passed to the target. You must use JSON dot notation, not bracket notation.", "Kinesis Streams Developer Guide . RunCommandParameters (dict) --Parameters used when you are using", "Entries: [REQUIRED] The entry that defines an event in your system. You can", "you specify Input , InputPath , or InputTransformer , you must use JSON", "Lists the targets assigned to the specified rule. See also: AWS API Documentation", "CloudWatch Events so that they can be matched to rules. See also: AWS", "InputTransformer (dict) --Settings to enable you to provide custom input to a target", ":type EventPattern: string :param EventPattern: [REQUIRED] The event pattern. For more information, see", "of the rule. \"\"\" pass def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None): \"\"\" Generate a", "Container Service Developer Guide . TaskDefinitionArn (string) -- [REQUIRED]The ARN of the task", "a copy of this software and associated documentation files (the \"Software\"), to deal", "Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS", "the event target is an Amazon ECS task. For more information about Amazon", "a rule is triggered. Target types include EC2 instances, AWS Lambda functions, Amazon", "OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN", "triggered. If one rule triggers multiple targets, you can use a different IAM", "to return. :rtype: dict :return: { 'Rules': [ { 'Name': 'string', 'Arn': 'string',", "any events, and won't self-trigger if it has a schedule expression. When you", "WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF", "to use on the generated url. By default, the http method is whatever", "when you are using the rule to invoke Amazon EC2 Run Command. RunCommandTargets", "partition key. For more information, see Amazon Kinesis Streams Key Concepts in the", "before you can delete the rule. When you delete a rule, incoming events", "provided, the timestamp of the PutEvents call is used. Source (string) --The source", "Guide . 
:type Event: string :param Event: [REQUIRED] The event, in JSON format,", "}, 'EcsParameters': { 'TaskDefinitionArn': 'string', 'TaskCount': 123 } }, ], 'NextToken': 'string' }", "you own, Amazon CloudWatch Events needs the appropriate permissions. For AWS Lambda and", "operation fails. When you enable a rule, incoming events might not immediately start", "notation, not bracket notation. For more information, see The JavaScript Object Notation (JSON)", "either an array of InstanceIds or a tag. (dict) --Information about the EC2", "in JSON format, to test against the event pattern. :rtype: dict :return: {", "and to permit persons to whom the Software is furnished to do so,", "expression. When you disable a rule, incoming events might continue to match to", ". TaskDefinitionArn (string) -- [REQUIRED]The ARN of the task definition to use if", "See also: AWS API Documentation :example: response = client.test_event_pattern( EventPattern='string', Event='string' ) :type", ":param operation_name: The operation name. This is the same name as the method", "If the rule does not exist, the operation fails. When you enable a", "target. InputPathsMap (dict) --Map of JSON paths to be extracted from the event.", "the Amazon CloudWatch Events User Guide . :type Event: string :param Event: [REQUIRED]", "specified rule. You must remove all targets from a rule using RemoveTargets before", "You must use JSON dot notation, not bracket notation. (string) -- (string) --", "Amazon ECS tasks, AWS Step Functions state machines, Run Command, and built-in targets.", "be paginated. :type operation_name: string :param operation_name: The operation name. This is the", "tasks to create based on the TaskDefinition . The default is one. :rtype:", ":type Limit: integer :param Limit: The maximum number of results to return. :rtype:", ":rtype: dict :return: { 'RuleNames': [ 'string', ], 'NextToken': 'string' } :returns: (string)", ":type Name: string :param Name: [REQUIRED] The name of the rule. \"\"\" pass", "or update a rule, incoming events might not immediately start matching to new", "passed to the target. In this case, nothing from the event itself is", "Ids=None): \"\"\" Removes the specified targets from the specified rule. When the rule", "def list_rule_names_by_target(TargetArn=None, NextToken=None, Limit=None): \"\"\" Lists the rules for the specified target. You", ":type Ids: list :param Ids: [REQUIRED] The IDs of the targets to remove", "of results to return. :rtype: dict :return: { 'Targets': [ { 'Id': 'string',", "def disable_rule(Name=None): \"\"\" Disables the specified rule. A disabled rule won't match any", "from the event and then use that data to send customized input to", "response = client.enable_rule( Name='string' ) :type Name: string :param Name: [REQUIRED] The name", "Event: [REQUIRED] The event, in JSON format, to test against the event pattern.", "Documentation :example: response = client.put_rule( Name='string', ScheduleExpression='string', EventPattern='string', State='ENABLED'|'DISABLED', Description='string', RoleArn='string' ) :type", ":param TargetArn: [REQUIRED] The Amazon Resource Name (ARN) of the target resource. :type", "pass def enable_rule(Name=None): \"\"\" Enables the specified rule. If the rule does not", "The JavaScript Object Notation (JSON) Data Interchange Format . InputPath (string) --The value", "no longer be invoked. 
When you remove a target, when the associated rule", "} \"\"\" pass def list_targets_by_rule(Rule=None, NextToken=None, Limit=None): \"\"\" Lists the targets assigned to", "and AWS Step Functions state machines, CloudWatch Events relies on IAM roles that", "pass def describe_rule(Name=None): \"\"\" Describes the specified rule. See also: AWS API Documentation", "CloudWatch Events relies on IAM roles that you specify in the RoleARN argument", "Name: [REQUIRED] The name of the rule. \"\"\" pass def describe_rule(Name=None): \"\"\" Describes", "'State': 'ENABLED'|'DISABLED', 'Description': 'string', 'ScheduleExpression': 'string', 'RoleArn': 'string' }, ], 'NextToken': 'string' }", "not include this parameter, the default is to use the eventId as the", "}, ] } \"\"\" pass def put_rule(Name=None, ScheduleExpression=None, EventPattern=None, State=None, Description=None, RoleArn=None): \"\"\"", "containing fields, which may also contain nested subobjects. No constraints are imposed on", "copy of this software and associated documentation files (the \"Software\"), to deal in", "'NextToken': 'string' } :returns: (string) -- \"\"\" pass def list_rules(NamePrefix=None, NextToken=None, Limit=None): \"\"\"", "{ 'Key': 'string', 'Values': [ 'string', ] }, ] }, 'EcsParameters': { 'TaskDefinitionArn':", "for. By default it expires in an hour (3600 seconds) :type HttpMethod: string", "events might not immediately start matching to new or updated rules. Please allow", "used when you are using the rule to invoke Amazon EC2 Run Command.", "Developer Guide . TaskDefinitionArn (string) -- [REQUIRED]The ARN of the task definition to", "the rule names. See also: AWS API Documentation :example: response = client.list_rules( NamePrefix='string',", "The event, in JSON format, to test against the event pattern. :rtype: dict", "Resource Name (ARN) of the IAM role to be used for this target", "built-in targets is supported only in the AWS Management Console. For some target", "client.put_rule( Name='string', ScheduleExpression='string', EventPattern='string', State='ENABLED'|'DISABLED', Description='string', RoleArn='string' ) :type Name: string :param Name:", ") :type NamePrefix: string :param NamePrefix: The prefix matching the rule name. :type", "effect. This action can partially fail if too many requests are made at", "the provided event. Most services in AWS treat : or / as the", "on the TaskDefinition . The default is one. :rtype: dict :return: { 'FailedEntryCount':", ":param Limit: The maximum number of results to return. :rtype: dict :return: {", "many requests are made at the same time. If that happens, FailedEntryCount is", "using the rule to invoke Amazon EC2 Run Command. RunCommandTargets (list) -- [REQUIRED]Currently,", "OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR", "You can either list all the rules or you can provide a prefix", "is a JSON path. You must use JSON dot notation, not bracket notation.", "triggers multiple targets, you can use a different IAM role for each target.", "specified rule. A disabled rule won't match any events, and won't self-trigger if", "rules. 
You can either list all the rules or you can provide a", "Description='string', RoleArn='string' ) :type Name: string :param Name: [REQUIRED] The name of the", "'Arn': 'string', 'EventPattern': 'string', 'ScheduleExpression': 'string', 'State': 'ENABLED'|'DISABLED', 'Description': 'string', 'RoleArn': 'string' }", "the create_foo operation can be paginated, you can use the call client.get_paginator('create_foo'). \"\"\"", "all targets from a rule using RemoveTargets before you can delete the rule.", "is an Amazon Kinesis stream, you can optionally specify which shard the event", "EC2 instances, AWS Lambda functions, Amazon Kinesis streams, Amazon ECS tasks, AWS Step", "targets to the specified rule, or updates the targets if they are already", "a rule, incoming events might continue to match to the disabled rule. Please", "to create based on the TaskDefinition . The default is one. \"\"\" pass", "match to the rule names. See also: AWS API Documentation :example: response =", "invoke a specific target in your account. See also: AWS API Documentation :example:", "in the AWS Management Console. For some target types, PutTargets provides target-specific parameters.", "a previous call to retrieve the next set of results. :type Limit: integer", "123, 'Entries': [ { 'EventId': 'string', 'ErrorCode': 'string', 'ErrorMessage': 'string' }, ] }", "which shard the event goes to by using the KinesisParameters argument. To invoke", "Please allow a short period of time for changes to take effect. See", "key, but this key may specify multiple values. Key (string) -- [REQUIRED]Can be", "add to the rule. (dict) --Targets are the resources to be invoked when", "the keys from InputPathsMap to customize the data sent to the target. KinesisParameters", "all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED \"AS", "SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR", "from InputPathsMap to customize the data sent to the target. KinesisParameters (dict) --The", "JSON dot notation, not bracket notation. For more information about JSON paths, see", "of the rule that you are creating or updating. :type ScheduleExpression: string :param", "remove from the rule. (string) -- :rtype: dict :return: { 'FailedEntryCount': 123, 'FailedEntries':", "invoke the operation as client.create_foo(**kwargs), if the create_foo operation can be paginated, you", "extracting part of the matched event when passing it to the target. You", "You can extract one or more key-value pairs from the event and then", "response = client.put_events( Entries=[ { 'Time': datetime(2015, 1, 1), 'Source': 'string', 'Resources': [", ":param Rule: [REQUIRED] The name of the rule. :type NextToken: string :param NextToken:", "that defines an event in your system. You can specify several parameters for", "Authentication and Access Control in the Amazon CloudWatch Events User Guide . When", "] }, ] }, 'EcsParameters': { 'TaskDefinitionArn': 'string', 'TaskCount': 123 } }, ]", "longer be invoked. When you remove a target, when the associated rule triggers,", "must use JSON dot notation, not bracket notation. When you add targets to", "updates the specified rule. Rules are enabled by default, or based on value", "also: AWS API Documentation :example: response = client.list_rules( NamePrefix='string', NextToken='string', Limit=123 ) :type", "rule and the associated rule triggers soon after, new or updated targets might", "[REQUIRED] The targets to update or add to the rule. 
(dict) --Targets are", "definition and task count to be used, if the event target is an", "to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the", "the http method is whatever is used in the method's model. \"\"\" pass", "it has a schedule expression. When you disable a rule, incoming events might", "However, CloudWatch Events uses an exact match in event patterns and rules. Be", "a list of tag values. If Key is InstanceIds , Values is a", "targets might not be immediately invoked. Please allow a short period of time", "is an Amazon ECS cluster. TaskCount (integer) --The number of tasks to create", ") :type Name: string :param Name: [REQUIRED] The name of the rule. \"\"\"", "target. You must use JSON dot notation, not bracket notation. For more information,", "of time for changes to take effect. See also: AWS API Documentation :example:", "do not include this parameter, the default is to use the eventId as", "'ErrorMessage': 'string' }, ] } \"\"\" pass def test_event_pattern(EventPattern=None, Event=None): \"\"\" Tests whether", "is passed to the target. You must use JSON dot notation, not bracket", "[REQUIRED] The name of the rule that you are creating or updating. :type", "enable you to provide custom input to a target based on certain event", "OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL", ":param Name: [REQUIRED] The name of the rule. \"\"\" pass def describe_rule(Name=None): \"\"\"", "The name of the rule that you are creating or updating. :type ScheduleExpression:", "changes to take effect. See also: AWS API Documentation :example: response = client.disable_rule(", "targets to update or add to the rule. (dict) --Targets are the resources", "default is one. \"\"\" pass def remove_targets(Rule=None, Ids=None): \"\"\" Removes the specified targets", "number of tasks to create based on the TaskDefinition . The default is", "client.disable_rule( Name='string' ) :type Name: string :param Name: [REQUIRED] The name of the", "WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE", "might continue to match to the disabled rule. Please allow a short period", "Amazon ECS task definition and task count to be used, if the event", "'RunCommandTargets': [ { 'Key': 'string', 'Values': [ 'string', ] }, ] }, 'EcsParameters':", "PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS", "event pattern matches the provided event. Most services in AWS treat : or", "Functions state machines, CloudWatch Events relies on IAM roles that you specify in", "a matching event is observed. Rules with ScheduleExpressions self-trigger based on the given", "client.put_events( Entries=[ { 'Time': datetime(2015, 1, 1), 'Source': 'string', 'Resources': [ 'string', ],", "(ARNs). However, CloudWatch Events uses an exact match in event patterns and rules.", "the ID of the failed target and the error code. See also: AWS", "'InputTemplate': 'string' }, 'KinesisParameters': { 'PartitionKeyPath': 'string' }, 'RunCommandParameters': { 'RunCommandTargets': [ {", "the event, resources associated with the event, and so on. (dict) --Represents an", "\"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT", "built-in targets. Note that creating rules with built-in targets is supported only in", "match. 
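For illustration, a minimal sketch of testing a pattern against a sample event (the pattern, the event fields, and all values below are illustrative placeholders; both arguments are passed as JSON strings):
import json
import boto3
events = boto3.client('events')
pattern = {'source': ['aws.ec2'], 'detail-type': ['EC2 Instance State-change Notification']}
sample_event = {
    'id': 'cd2d702e-ab31-411b-9344-793ce56b1bc7',  # placeholder values only
    'detail-type': 'EC2 Instance State-change Notification',
    'source': 'aws.ec2',
    'account': '123456789012',
    'time': '2017-01-01T00:00:00Z',
    'region': 'us-east-1',
    'resources': ['arn:aws:ec2:us-east-1:123456789012:instance/i-0123456789abcdef0'],
    'detail': {'instance-id': 'i-0123456789abcdef0', 'state': 'running'},
}
resp = events.test_event_pattern(EventPattern=json.dumps(pattern), Event=json.dumps(sample_event))
print(resp['Result'])  # True if the pattern matches the event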
See also: AWS API Documentation :example: response = client.test_event_pattern( EventPattern='string', Event='string' )", ":return: { 'RuleNames': [ 'string', ], 'NextToken': 'string' } :returns: (string) -- \"\"\"", "can use a different IAM role for each target. Input (string) --Valid JSON", "the rule is triggered, those targets are no longer be invoked. When you", "of the rule. \"\"\" pass def describe_rule(Name=None): \"\"\" Describes the specified rule. See", "API Documentation :example: response = client.put_rule( Name='string', ScheduleExpression='string', EventPattern='string', State='ENABLED'|'DISABLED', Description='string', RoleArn='string' )", "target is an Amazon Kinesis stream. If you do not include this parameter,", "any person obtaining a copy of this software and associated documentation files (the", "task definition to use if the event target is an Amazon ECS cluster.", "to the target. InputPathsMap (dict) --Map of JSON paths to be extracted from", "string :param operation_name: The operation name. This is the same name as the", "to update or add to the rule. (dict) --Targets are the resources to", "the targets assigned to the specified rule. See also: AWS API Documentation :example:", "at least an EventPattern or ScheduleExpression. Rules with EventPatterns are triggered when a", "also: AWS API Documentation :example: response = client.put_rule( Name='string', ScheduleExpression='string', EventPattern='string', State='ENABLED'|'DISABLED', Description='string',", "a client, its method, and arguments :type ClientMethod: string :param ClientMethod: The client", "using RemoveTargets before you can delete the rule. When you delete a rule,", ":type NamePrefix: string :param NamePrefix: The prefix matching the rule name. :type NextToken:", "invoked when a rule is triggered. Example targets include EC2 instances, AWS Lambda", "where you can use the values of the keys from InputPathsMap to customize", ":param Name: [REQUIRED] The name of the rule. \"\"\" pass def enable_rule(Name=None): \"\"\"", "[REQUIRED]If Key is tag: tag-key , Values is a list of tag values.", "THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER", "'KinesisParameters': { 'PartitionKeyPath': 'string' }, 'RunCommandParameters': { 'RunCommandTargets': [ { 'Key': 'string', 'Values':", "to make API calls against the resources that you own, Amazon CloudWatch Events", "Check if an operation can be paginated. :type operation_name: string :param operation_name: The", "'string', 'ErrorMessage': 'string' }, ] } \"\"\" pass def put_rule(Name=None, ScheduleExpression=None, EventPattern=None, State=None,", "Amazon CloudWatch Events so that they can be matched to rules. See also:", "the Software, and to permit persons to whom the Software is furnished to", "the resources that are invoked when a rule is triggered. Example targets include", "Key is tag: tag-key , Values is a list of tag values. If", "can specify several parameters for the entry such as the source and type", "types include EC2 instances, AWS Lambda functions, Amazon Kinesis streams, Amazon ECS tasks,", "State=None, Description=None, RoleArn=None): \"\"\" Creates or updates the specified rule. Rules are enabled", "the data sent to the target. 
KinesisParameters (dict) --The custom parameter you can", "'string', 'ScheduleExpression': 'string', 'State': 'ENABLED'|'DISABLED', 'Description': 'string', 'RoleArn': 'string' } \"\"\" pass def", "'EcsParameters': { 'TaskDefinitionArn': 'string', 'TaskCount': 123 } }, ] ) :type Rule: string", "copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\",", "string :param Name: [REQUIRED] The name of the rule. \"\"\" pass def generate_presigned_url(ClientMethod=None,", "including only one RunCommandTarget block, which specifies either an array of InstanceIds or", "sense, an object containing fields, which may also contain nested subobjects. No constraints", "}, ], 'NextToken': 'string' } :returns: (string) -- (string) -- \"\"\" pass def", "after, new or updated targets might not be immediately invoked. Please allow a", "When you delete a rule, incoming events might continue to match to the", ". The default is one. \"\"\" pass def remove_targets(Rule=None, Ids=None): \"\"\" Removes the", "the given schedule. A rule can have both an EventPattern and a ScheduleExpression,", "about JSON paths, see JSONPath . InputTransformer (dict) --Settings to enable you to", "updated rules. Please allow a short period of time for changes to take", "--AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns.", "AWS API Documentation :example: response = client.list_rule_names_by_target( TargetArn='string', NextToken='string', Limit=123 ) :type TargetArn:", "of the event, per RFC3339 . If no timestamp is provided, the timestamp", "number of results to return. :rtype: dict :return: { 'Rules': [ { 'Name':", "Limit=None): \"\"\" Lists the rules for the specified target. You can see which", "incoming events might not immediately start matching to a newly enabled rule. Please", "Service Developer Guide . TaskDefinitionArn (string) -- [REQUIRED]The ARN of the task definition", "see which of the rules in Amazon CloudWatch Events can invoke a specific", "The default is one. :rtype: dict :return: { 'FailedEntryCount': 123, 'FailedEntries': [ {", "When you create or update a rule, incoming events might not immediately start", "Limit=None): \"\"\" Lists the targets assigned to the specified rule. See also: AWS", "AWS Step Functions state machines, Run Command, and built-in targets. Id (string) --", "A description of the rule. :type RoleArn: string :param RoleArn: The Amazon Resource", "pass def list_targets_by_rule(Rule=None, NextToken=None, Limit=None): \"\"\" Lists the targets assigned to the specified", "must contain at least an EventPattern or ScheduleExpression. Rules with EventPatterns are triggered", "Ids=[ 'string', ] ) :type Rule: string :param Rule: [REQUIRED] The name of", ". InputTransformer (dict) --Settings to enable you to provide custom input to a", "\"\"\" Enables the specified rule. If the rule does not exist, the operation", "defines an event in your system. You can specify several parameters for the", "{ 'FailedEntryCount': 123, 'FailedEntries': [ { 'TargetId': 'string', 'ErrorCode': 'string', 'ErrorMessage': 'string' },", "want to match. See also: AWS API Documentation :example: response = client.test_event_pattern( EventPattern='string',", "that happens, FailedEntryCount is non-zero in the response and each entry in FailedEntries", "include only one key, but this key may specify multiple values. Key (string)", "policies. 
For EC2 instances, Amazon Kinesis streams, and AWS Step Functions state machines,", "ScheduleExpression: The scheduling expression. For example, 'cron(0 20 * * ? *)', 'rate(5", "123 } }, ], 'NextToken': 'string' } :returns: (string) -- (string) -- \"\"\"", "in FailedEntries provides the ID of the failed target and the error code.", "\"\"\" pass def put_events(Entries=None): \"\"\" Sends custom events to Amazon CloudWatch Events so", "by default, or based on value of the state. You can disable a", "which specifies either an array of InstanceIds or a tag. (dict) --Information about", "create_foo, and you'd normally invoke the operation as client.create_foo(**kwargs), if the create_foo operation", ":param Name: [REQUIRED] The name of the rule that you are creating or", "Format . InputPath (string) --The value of the JSONPath that is used for", "NextToken='string', Limit=123 ) :type NamePrefix: string :param NamePrefix: The prefix matching the rule", "get_paginator(operation_name=None): \"\"\" Create a paginator for an operation. :type operation_name: string :param operation_name:", "associated rule triggers soon after, new or updated targets might not be immediately", "passing it to the target. You must use JSON dot notation, not bracket", ":type Targets: list :param Targets: [REQUIRED] The targets to update or add to", "of the keys from InputPathsMap to customize the data sent to the target.", "ID of the failed target and the error code. See also: AWS API", "with ScheduleExpressions self-trigger based on the given schedule. A rule can have both", "schedule. Most services in AWS treat : or / as the same character", "Rule='string', Ids=[ 'string', ] ) :type Rule: string :param Rule: [REQUIRED] The name", "Concepts in the Amazon Kinesis Streams Developer Guide . RunCommandParameters (dict) --Parameters used", "may also contain nested subobjects. No constraints are imposed on its contents. :rtype:", "Guide . RunCommandParameters (dict) --Parameters used when you are using the rule to", "to be used, if the event target is an Amazon ECS task. For", "do so, subject to the following conditions: The above copyright notice and this", "targets from a rule using RemoveTargets before you can delete the rule. When", "The http method to use on the generated url. By default, the http", "'string', 'Arn': 'string', 'EventPattern': 'string', 'State': 'ENABLED'|'DISABLED', 'Description': 'string', 'ScheduleExpression': 'string', 'RoleArn': 'string'", "'string' } :returns: (string) -- (string) -- \"\"\" pass def put_events(Entries=None): \"\"\" Sends", "{ 'Name': 'string', 'Arn': 'string', 'EventPattern': 'string', 'State': 'ENABLED'|'DISABLED', 'Description': 'string', 'ScheduleExpression': 'string',", "Functions state machines, and built-in targets. Note that creating rules with built-in targets", "specified targets to the specified rule, or updates the targets if they are", "ScheduleExpression: string :param ScheduleExpression: The scheduling expression. For example, 'cron(0 20 * *", "tag values. If Key is InstanceIds , Values is a list of Amazon", "JSON sense, an object containing fields, which may also contain nested subobjects. No", "The name of the rule. \"\"\" pass def enable_rule(Name=None): \"\"\" Enables the specified", "to the specified rule. 
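For illustration, a minimal sketch of enumerating a rule's targets (the rule name 'my-rule' is an assumption; a boto3 'events' client is assumed):
import boto3
events = boto3.client('events')
resp = events.list_targets_by_rule(Rule='my-rule', Limit=10)
for target in resp['Targets']:
    print(target['Id'], target['Arn'])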
See also: AWS API Documentation :example: response = client.list_targets_by_rule(", "is furnished to do so, subject to the following conditions: The above copyright", "in the response and each entry in FailedEntries provides the ID of the", "Documentation :example: response = client.list_rules( NamePrefix='string', NextToken='string', Limit=123 ) :type NamePrefix: string :param", "the disabled rule. Please allow a short period of time for changes to", "machines, CloudWatch Events relies on IAM roles that you specify in the RoleARN", "Any number, including zero, may be present. (string) -- DetailType (string) --Free-form string", "AWS Step Functions state machines, CloudWatch Events relies on IAM roles that you", "rule. When you delete a rule, incoming events might continue to match to", "detail. Detail (string) --In the JSON sense, an object containing fields, which may", "JSON dot notation, not bracket notation. When you add targets to a rule", "Event=None): \"\"\" Tests whether the specified event pattern matches the provided event. Most", "This is the same name as the method name on the client. For", "several parameters for the entry such as the source and type of the", "Name (ARN) of the IAM role to be used for this target when", "}, ] } :returns: Rule (string) -- [REQUIRED] The name of the rule.", "Events needs the appropriate permissions. For AWS Lambda and Amazon SNS resources, CloudWatch", "sent the command, specified as key-value pairs. Each RunCommandTarget block can include only", "Amazon Resource Name (ARN) of the IAM role associated with the rule. :rtype:", "list_rule_names_by_target(TargetArn=None, NextToken=None, Limit=None): \"\"\" Lists the rules for the specified target. You can", "FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR", "INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR", ". If no timestamp is provided, the timestamp of the PutEvents call is", "rule. :type Ids: list :param Ids: [REQUIRED] The IDs of the targets to", "not be immediately invoked. Please allow a short period of time for changes", "prefix to match to the rule names. See also: AWS API Documentation :example:", "(the \"Software\"), to deal in the Software without restriction, including without limitation the", "can disable a rule using DisableRule . When you create or update a", "Documentation :example: response = client.remove_targets( Rule='string', Ids=[ 'string', ] ) :type Rule: string", "[REQUIRED] The name of the rule. \"\"\" pass def describe_rule(Name=None): \"\"\" Describes the", "123, 'FailedEntries': [ { 'TargetId': 'string', 'ErrorCode': 'string', 'ErrorMessage': 'string' }, ] }", "target. Input (string) --Valid JSON text passed to the target. In this case,", "in the Amazon Kinesis Streams Developer Guide . RunCommandParameters (dict) --Parameters used when", "to permit persons to whom the Software is furnished to do so, subject", "OTHER DEALINGS IN THE SOFTWARE. ''' def can_paginate(operation_name=None): \"\"\" Check if an operation", "rule. If the rule does not exist, the operation fails. When you enable", "number, including zero, may be present. (string) -- DetailType (string) --Free-form string used", "ScheduleExpression=None, EventPattern=None, State=None, Description=None, RoleArn=None): \"\"\" Creates or updates the specified rule. Rules", ") :type Name: string :param Name: [REQUIRED] The name of the rule. :rtype:", "a specific target in your account. 
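For illustration, a hedged sketch that pages through every rule able to invoke a given target, using NextToken to resume between pages (the Lambda function ARN is a placeholder):
import boto3
events = boto3.client('events')
kwargs = {'TargetArn': 'arn:aws:lambda:us-east-1:123456789012:function:my-function'}
while True:
    resp = events.list_rule_names_by_target(**kwargs)
    for name in resp['RuleNames']:
        print(name)
    if 'NextToken' not in resp:
        break
    kwargs['NextToken'] = resp['NextToken']  # resume from where the previous page stopped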
See also: AWS API Documentation :example: response", "Rules with ScheduleExpressions self-trigger based on the given schedule. A rule can have", "value of the JSONPath that is used for extracting part of the matched", "already associated with the rule. Targets are the resources that are invoked when", "A rule must contain at least an EventPattern or ScheduleExpression. Rules with EventPatterns", "is create_foo, and you'd normally invoke the operation as client.create_foo(**kwargs), if the create_foo", "OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER", "Permission is hereby granted, free of charge, to any person obtaining a copy", "Name: [REQUIRED] The name of the rule. :rtype: dict :return: { 'Name': 'string',", "to invoke Amazon EC2 Run Command. RunCommandTargets (list) -- [REQUIRED]Currently, we support including", "be included in all copies or substantial portions of the Software. THE SOFTWARE", "matching events as well as on a schedule. Most services in AWS treat", "Name (ARN) of the target. RoleArn (string) --The Amazon Resource Name (ARN) of", "whom the Software is furnished to do so, subject to the following conditions:", ". RunCommandParameters (dict) --Parameters used when you are using the rule to invoke", "For some target types, PutTargets provides target-specific parameters. If the target is an", "same character in Amazon Resource Names (ARNs). However, CloudWatch Events uses an exact", "Amazon Kinesis Streams Key Concepts in the Amazon Kinesis Streams Developer Guide .", "State: Indicates whether the rule is enabled or disabled. :type Description: string :param", "of tag values. If Key is InstanceIds , Values is a list of", "'ENABLED'|'DISABLED', 'Description': 'string', 'ScheduleExpression': 'string', 'RoleArn': 'string' }, ], 'NextToken': 'string' } \"\"\"", "Rule: [REQUIRED] The name of the rule. :type Targets: list :param Targets: [REQUIRED]", "{ 'Time': datetime(2015, 1, 1), 'Source': 'string', 'Resources': [ 'string', ], 'DetailType': 'string',", "IAM roles that you specify in the RoleARN argument in PutTarget . For", "target resource. :type NextToken: string :param NextToken: The token returned by a previous", "Limit=123 ) :type Rule: string :param Rule: [REQUIRED] The name of the rule.", "of the matched event when passing it to the target. You must use", "def can_paginate(operation_name=None): \"\"\" Check if an operation can be paginated. :type operation_name: string", "resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any", "[ { 'Name': 'string', 'Arn': 'string', 'EventPattern': 'string', 'State': 'ENABLED'|'DISABLED', 'Description': 'string', 'ScheduleExpression':", "AWS Management Console. For some target types, PutTargets provides target-specific parameters. If the", "'Time': datetime(2015, 1, 1), 'Source': 'string', 'Resources': [ 'string', ], 'DetailType': 'string', 'Detail':", "entry in FailedEntries provides the ID of the failed target and the error", "custom events to Amazon CloudWatch Events so that they can be matched to", "of the IAM role to be used for this target when the rule", "DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,", "Run Command, and built-in targets. Id (string) -- [REQUIRED]The ID of the target.", "event target is an Amazon ECS task. For more information about Amazon ECS", "EventPatterns are triggered when a matching event is observed. Rules with ScheduleExpressions self-trigger", "of the Software. 
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''


def can_paginate(operation_name=None):
    """
    Check if an operation can be paginated.

    :type operation_name: string
    :param operation_name: The operation name. This is the same name as the method
        name on the client. For example, if the method name is create_foo, and you'd
        normally invoke the operation as client.create_foo(**kwargs), if the create_foo
        operation can be paginated, you can use the call client.get_paginator('create_foo').
    """
    pass


def delete_rule(Name=None):
    """
    Deletes the specified rule. You must remove all targets from a rule using
    RemoveTargets before you can delete the rule. When you delete a rule, incoming
    events might continue to match to the deleted rule. Please allow a short period
    of time for changes to take effect.
    See also: AWS API Documentation

    :example: response = client.delete_rule(Name='string')
    :type Name: string
    :param Name: [REQUIRED] The name of the rule.
    """
    pass


def describe_rule(Name=None):
    """
    Describes the specified rule.
    See also: AWS API Documentation

    :example: response = client.describe_rule(Name='string')
    :type Name: string
    :param Name: [REQUIRED] The name of the rule.
    :rtype: dict
    :return: {'Name': 'string', 'Arn': 'string', 'EventPattern': 'string',
        'ScheduleExpression': 'string', 'State': 'ENABLED'|'DISABLED',
        'Description': 'string', 'RoleArn': 'string'}
    """
    pass


def disable_rule(Name=None):
    """
    Disables the specified rule. A disabled rule won't match any events, and won't
    self-trigger if it has a schedule expression. When you disable a rule, incoming
    events might continue to match to the disabled rule. Please allow a short period
    of time for changes to take effect.
    See also: AWS API Documentation

    :example: response = client.disable_rule(Name='string')
    :type Name: string
    :param Name: [REQUIRED] The name of the rule.
    """
    pass


def enable_rule(Name=None):
    """
    Enables the specified rule. If the rule does not exist, the operation fails.
    When you enable a rule, incoming events might not immediately start matching to a
    newly enabled rule. Please allow a short period of time for changes to take effect.
    See also: AWS API Documentation

    :example: response = client.enable_rule(Name='string')
    :type Name: string
    :param Name: [REQUIRED] The name of the rule.
    """
    pass


def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None):
    """
    Generate a presigned url given a client, its method, and arguments.

    :type ClientMethod: string
    :param ClientMethod: The client method to presign for.
    :type Params: dict
    :param Params: The parameters normally passed to ClientMethod.
    :type ExpiresIn: int
    :param ExpiresIn: The number of seconds the presigned url is valid for. By default
        it expires in an hour (3600 seconds).
    :type HttpMethod: string
    :param HttpMethod: The http method to use on the generated url. By default, the
        http method is whatever is used in the method's model.
    """
    pass


def get_paginator(operation_name=None):
    """
    Create a paginator for an operation.

    :type operation_name: string
    :param operation_name: The operation name. This is the same name as the method
        name on the client. For example, if the method name is create_foo, and you'd
        normally invoke the operation as client.create_foo(**kwargs), if the create_foo
        operation can be paginated, you can use the call client.get_paginator('create_foo').
    :rtype: L{botocore.paginate.Paginator}
    """
    pass


def get_waiter():
    """
    """
    pass


def list_rule_names_by_target(TargetArn=None, NextToken=None, Limit=None):
    """
    Lists the rules for the specified target. You can see which of the rules in
    Amazon CloudWatch Events can invoke a specific target in your account.
    See also: AWS API Documentation

    :example: response = client.list_rule_names_by_target(TargetArn='string', NextToken='string', Limit=123)
    :type TargetArn: string
    :param TargetArn: [REQUIRED] The Amazon Resource Name (ARN) of the target resource.
    :type NextToken: string
    :param NextToken: The token returned by a previous call to retrieve the next set of results.
    :type Limit: integer
    :param Limit: The maximum number of results to return.
    :rtype: dict
    :return: {'RuleNames': ['string', ], 'NextToken': 'string'}
    """
    pass


def list_rules(NamePrefix=None, NextToken=None, Limit=None):
    """
    Lists your Amazon CloudWatch Events rules. You can either list all the rules or
    you can provide a prefix to match to the rule names.
    See also: AWS API Documentation

    :example: response = client.list_rules(NamePrefix='string', NextToken='string', Limit=123)
    :type NamePrefix: string
    :param NamePrefix: The prefix matching the rule name.
    :type NextToken: string
    :param NextToken: The token returned by a previous call to retrieve the next set of results.
    :type Limit: integer
    :param Limit: The maximum number of results to return.
    :rtype: dict
    :return: {'Rules': [{'Name': 'string', 'Arn': 'string', 'EventPattern': 'string',
        'State': 'ENABLED'|'DISABLED', 'Description': 'string',
        'ScheduleExpression': 'string', 'RoleArn': 'string'}, ], 'NextToken': 'string'}
    """
    pass


def list_targets_by_rule(Rule=None, NextToken=None, Limit=None):
    """
    Lists the targets assigned to the specified rule.
    See also: AWS API Documentation

    :example: response = client.list_targets_by_rule(Rule='string', NextToken='string', Limit=123)
    :type Rule: string
    :param Rule: [REQUIRED] The name of the rule.
    :type NextToken: string
    :param NextToken: The token returned by a previous call to retrieve the next set of results.
    :type Limit: integer
    :param Limit: The maximum number of results to return.
    :rtype: dict
    :return: {'Targets': [{'Id': 'string', 'Arn': 'string', 'RoleArn': 'string',
        'Input': 'string', 'InputPath': 'string',
        'InputTransformer': {'InputPathsMap': {'string': 'string'}, 'InputTemplate': 'string'},
        'KinesisParameters': {'PartitionKeyPath': 'string'},
        'RunCommandParameters': {'RunCommandTargets': [{'Key': 'string', 'Values': ['string', ]}, ]},
        'EcsParameters': {'TaskDefinitionArn': 'string', 'TaskCount': 123}}, ],
        'NextToken': 'string'}
    """
    pass


def put_events(Entries=None):
    """
    Sends custom events to Amazon CloudWatch Events so that they can be matched to rules.
    See also: AWS API Documentation

    :example: response = client.put_events(
        Entries=[{'Time': datetime(2015, 1, 1), 'Source': 'string', 'Resources': ['string', ],
                  'DetailType': 'string', 'Detail': 'string'}, ])
    :type Entries: list
    :param Entries: [REQUIRED] The entry that defines an event in your system. You can
        specify several parameters for the entry such as the source and type of the
        event, resources associated with the event, and so on.
        (dict) -- Represents an event to be submitted.
            Time (datetime) -- The timestamp of the event, per RFC3339. If no timestamp
                is provided, the timestamp of the PutEvents call is used.
            Source (string) -- The source of the event.
            Resources (list) -- AWS resources, identified by Amazon Resource Name (ARN),
                which the event primarily concerns. Any number, including zero, may be present.
            DetailType (string) -- Free-form string used to decide what fields to expect
                in the event detail.
            Detail (string) -- In the JSON sense, an object containing fields, which may
                also contain nested subobjects. No constraints are imposed on its contents.
    :rtype: dict
    :return: {'FailedEntryCount': 123, 'Entries': [{'EventId': 'string',
        'ErrorCode': 'string', 'ErrorMessage': 'string'}, ]}
    """
    pass
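# Usage sketch for put_events (illustrative only, not part of the generated stubs):
# it assumes a boto3 CloudWatch Events client and configured AWS credentials, and the
# event source, detail type and payload below are hypothetical placeholders.
#
#   import json
#   import boto3
#
#   events_client = boto3.client('events')
#   response = events_client.put_events(
#       Entries=[
#           {
#               'Source': 'my.custom.application',      # hypothetical event source
#               'DetailType': 'order-status-change',    # hypothetical detail type
#               'Detail': json.dumps({'orderId': 123, 'status': 'shipped'}),
#           },
#       ]
#   )
#   # A non-zero FailedEntryCount means some entries were rejected; the Entries list in
#   # the response carries a per-entry ErrorCode / ErrorMessage for the failed ones.
#   if response['FailedEntryCount'] > 0:
#       print(response['Entries'])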
:type ScheduleExpression: string :param ScheduleExpression: The scheduling expression.", "task definition and task count to be used, if the event target is", "the operation as client.create_foo(**kwargs), if the create_foo operation can be paginated, you can", "the target. KinesisParameters (dict) --The custom parameter you can use to control shard", "is whatever is used in the method's model. \"\"\" pass def get_paginator(operation_name=None): \"\"\"", "is valid for. By default it expires in an hour (3600 seconds) :type", "expression. For example, 'cron(0 20 * * ? *)', 'rate(5 minutes)'. :type EventPattern:", "THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ''' def", "NextToken=None, Limit=None): \"\"\" Lists the rules for the specified target. You can see", "-- [REQUIRED]If Key is tag: tag-key , Values is a list of tag", "to the target. In this case, nothing from the event itself is passed", "(string) --In the JSON sense, an object containing fields, which may also contain", "goes to by using the KinesisParameters argument. To invoke a command on multiple", "can be paginated, you can use the call client.get_paginator('create_foo'). \"\"\" pass def delete_rule(Name=None):", "[REQUIRED] The name of the rule. \"\"\" pass def enable_rule(Name=None): \"\"\" Enables the", "-- :rtype: dict :return: { 'FailedEntryCount': 123, 'FailedEntries': [ { 'TargetId': 'string', 'ErrorCode':", "copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and", "'string', 'EventPattern': 'string', 'State': 'ENABLED'|'DISABLED', 'Description': 'string', 'ScheduleExpression': 'string', 'RoleArn': 'string' }, ],", "'Values': [ 'string', ] }, ] }, 'EcsParameters': { 'TaskDefinitionArn': 'string', 'TaskCount': 123", "(JSON) Data Interchange Format . InputPath (string) --The value of the JSONPath that", "when the associated rule triggers, removed targets might continue to be invoked. Please", "License (MIT) Copyright (c) 2016 WavyCloud Permission is hereby granted, free of charge,", ". When you create or update a rule, incoming events might not immediately", "decide what fields to expect in the event detail. Detail (string) --In the", "triggered. Example targets include EC2 instances, AWS Lambda functions, Amazon Kinesis streams, Amazon", "name on the client. For example, if the method name is create_foo, and", "Events relies on IAM roles that you specify in the RoleARN argument in", "InputPath , or InputTransformer , you must use JSON dot notation, not bracket", "is non-zero in the response and each entry in FailedEntries provides the ID", "\"\"\" pass def enable_rule(Name=None): \"\"\" Enables the specified rule. If the rule does", ". :type Event: string :param Event: [REQUIRED] The event, in JSON format, to", "a target based on certain event data. You can extract one or more", "dict :return: { 'FailedEntryCount': 123, 'Entries': [ { 'EventId': 'string', 'ErrorCode': 'string', 'ErrorMessage':", "event itself is passed to the target. You must use JSON dot notation,", "Example targets include EC2 instances, AWS Lambda functions, Amazon Kinesis streams, Amazon ECS", "HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN", "following conditions: The above copyright notice and this permission notice shall be included", "provides target-specific parameters. 
If the target is an Amazon Kinesis stream, you can", "'string', 'State': 'ENABLED'|'DISABLED', 'Description': 'string', 'RoleArn': 'string' } \"\"\" pass def disable_rule(Name=None): \"\"\"", "a short period of time for changes to take effect. See also: AWS", "entry that defines an event in your system. You can specify several parameters", "one key, but this key may specify multiple values. Key (string) -- [REQUIRED]Can", "with built-in targets is supported only in the AWS Management Console. For some", "to be submitted. Time (datetime) --The timestamp of the event, per RFC3339 .", "match to the deleted rule. Please allow a short period of time for", "field. To be able to make API calls against the resources that you", "see The JavaScript Object Notation (JSON) Data Interchange Format . InputPath (string) --The", "is triggered. If one rule triggers multiple targets, you can use a different", "Step Functions state machines, Run Command, and built-in targets. Id (string) -- [REQUIRED]The", "and then use that data to send customized input to the target. InputPathsMap", "state machines, Run Command, and built-in targets. Id (string) -- [REQUIRED]The ID of", "to the rule names. See also: AWS API Documentation :example: response = client.list_rules(", ": or / as the same character in Amazon Resource Names (ARNs). However,", "The above copyright notice and this permission notice shall be included in all", "paths to be extracted from the event. These are key-value pairs, where each", "you can use the RunCommandParameters field. To be able to make API calls", "Amazon Kinesis stream. If you do not include this parameter, the default is", "triggers soon after, new or updated targets might not be immediately invoked. Please", "example, if the method name is create_foo, and you'd normally invoke the operation", "= client.remove_targets( Rule='string', Ids=[ 'string', ] ) :type Rule: string :param Rule: [REQUIRED]", "pass def list_rule_names_by_target(TargetArn=None, NextToken=None, Limit=None): \"\"\" Lists the rules for the specified target.", "of the target. Arn (string) -- [REQUIRED]The Amazon Resource Name (ARN) of the", "value of the state. You can disable a rule using DisableRule . When", "string :param Name: [REQUIRED] The name of the rule. \"\"\" pass def enable_rule(Name=None):", "also: AWS API Documentation :example: response = client.disable_rule( Name='string' ) :type Name: string", "is used in the method's model. \"\"\" pass def get_paginator(operation_name=None): \"\"\" Create a", "partially fail if too many requests are made at the same time. If", "granted, free of charge, to any person obtaining a copy of this software", "so that they match the ARN syntax in the event you want to", "limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell", "too many requests are made at the same time. If that happens, FailedEntryCount", "update a rule, incoming events might not immediately start matching to new or", "for the specified target. You can see which of the rules in Amazon", "if the method name is create_foo, and you'd normally invoke the operation as", "Resource Name (ARN) of the IAM role associated with the rule. :rtype: dict", "to the rule. (dict) --Targets are the resources to be invoked when a", "'NextToken': 'string' } \"\"\" pass def list_targets_by_rule(Rule=None, NextToken=None, Limit=None): \"\"\" Lists the targets", "return. 
:rtype: dict :return: { 'Rules': [ { 'Name': 'string', 'Arn': 'string', 'EventPattern':", "ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF", "you enable a rule, incoming events might not immediately start matching to a", "WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO", "[ 'string', ] }, ] }, 'EcsParameters': { 'TaskDefinitionArn': 'string', 'TaskCount': 123 }", "default is to use the eventId as the partition key. PartitionKeyPath (string) --", "incoming events might continue to match to the disabled rule. Please allow a", "for this target when the rule is triggered. If one rule triggers multiple", "'Name': 'string', 'Arn': 'string', 'EventPattern': 'string', 'State': 'ENABLED'|'DISABLED', 'Description': 'string', 'ScheduleExpression': 'string', 'RoleArn':", ":example: response = client.put_targets( Rule='string', Targets=[ { 'Id': 'string', 'Arn': 'string', 'RoleArn': 'string',", "dict :return: { 'Targets': [ { 'Id': 'string', 'Arn': 'string', 'RoleArn': 'string', 'Input':", "\"\"\" Create a paginator for an operation. :type operation_name: string :param operation_name: The", "a paginator for an operation. :type operation_name: string :param operation_name: The operation name.", "KinesisParameters argument. To invoke a command on multiple EC2 instances with one rule,", "source and type of the event, resources associated with the event, and so", "'string', 'ErrorCode': 'string', 'ErrorMessage': 'string' }, ] } \"\"\" pass def put_rule(Name=None, ScheduleExpression=None,", "-- [REQUIRED]The ID of the target. Arn (string) -- [REQUIRED]The Amazon Resource Name", "are key-value pairs, where each value is a JSON path. You must use", "For more information about Amazon ECS tasks, see Task Definitions in the Amazon", "For more information about JSON paths, see JSONPath . InputTransformer (dict) --Settings to", "}, 'EcsParameters': { 'TaskDefinitionArn': 'string', 'TaskCount': 123 } }, ] ) :type Rule:", "or more key-value pairs from the event and then use that data to", "HttpMethod: string :param HttpMethod: The http method to use on the generated url.", "you specify in the RoleARN argument in PutTarget . For more information, see", "only one RunCommandTarget block, which specifies either an array of InstanceIds or a", "\"\"\" Sends custom events to Amazon CloudWatch Events so that they can be", "task. For more information about Amazon ECS tasks, see Task Definitions in the", "use if the event target is an Amazon ECS cluster. TaskCount (integer) --The", "use to control shard assignment, when the target is an Amazon Kinesis stream.", "with one rule, you can use the RunCommandParameters field. To be able to", "event pattern. For more information, see Events and Event Patterns in the Amazon", "see Task Definitions in the Amazon EC2 Container Service Developer Guide . TaskDefinitionArn", "to the specified rule, or updates the targets if they are already associated", ". :type State: string :param State: Indicates whether the rule is enabled or", "{ 'TaskDefinitionArn': 'string', 'TaskCount': 123 } }, ] ) :type Rule: string :param", "key-value pairs, where each value is a JSON path. You must use JSON", "RoleArn: string :param RoleArn: The Amazon Resource Name (ARN) of the IAM role", "InputPathsMap (dict) --Map of JSON paths to be extracted from the event. These", "sure to use the correct ARN characters when creating event patterns so that", "tag-key , Values is a list of tag values. 
If Key is InstanceIds", "Removes the specified targets from the specified rule. When the rule is triggered,", "the target is an Amazon Kinesis stream. If you do not include this", "pattern. For more information, see Events and Event Patterns in the Amazon CloudWatch", "used to decide what fields to expect in the event detail. Detail (string)", "of the event. Resources (list) --AWS resources, identified by Amazon Resource Name (ARN),", "more information about Amazon ECS tasks, see Task Definitions in the Amazon EC2", ":type State: string :param State: Indicates whether the rule is enabled or disabled.", "the specified rule. A disabled rule won't match any events, and won't self-trigger", "-- \"\"\" pass def put_events(Entries=None): \"\"\" Sends custom events to Amazon CloudWatch Events", "associated with the rule. :rtype: dict :return: { 'RuleArn': 'string' } \"\"\" pass", "event, and so on. (dict) --Represents an event to be submitted. Time (datetime)", "when a rule is triggered. Example targets include EC2 instances, AWS Lambda functions,", "the EC2 instances that are to be sent the command, specified as key-value", "input to a target based on certain event data. You can extract one", "for an operation. :type operation_name: string :param operation_name: The operation name. This is", "enable a rule, incoming events might not immediately start matching to a newly", "changes to take effect. This action can partially fail if too many requests", "[REQUIRED]Input template where you can use the values of the keys from InputPathsMap", ":param Ids: [REQUIRED] The IDs of the targets to remove from the rule.", "and/or sell copies of the Software, and to permit persons to whom the", "targets to a rule and the associated rule triggers soon after, new or", "[ { 'EventId': 'string', 'ErrorCode': 'string', 'ErrorMessage': 'string' }, ] } \"\"\" pass", "rule is triggered, those targets are no longer be invoked. When you remove", "is supported only in the AWS Management Console. For some target types, PutTargets", "} :returns: (string) -- (string) -- \"\"\" pass def put_events(Entries=None): \"\"\" Sends custom", "of charge, to any person obtaining a copy of this software and associated", "the event itself is passed to the target. You must use JSON dot", "one rule, you can use the RunCommandParameters field. To be able to make", "that you specify in the RoleARN argument in PutTarget . For more information,", "invoked. When you remove a target, when the associated rule triggers, removed targets", "'string', 'ScheduleExpression': 'string', 'RoleArn': 'string' }, ], 'NextToken': 'string' } \"\"\" pass def", "[REQUIRED] The name of the rule. :type NextToken: string :param NextToken: The token", "an EventPattern and a ScheduleExpression, in which case the rule triggers on matching", "For more information, see Authentication and Access Control in the Amazon CloudWatch Events", "IN THE SOFTWARE. ''' def can_paginate(operation_name=None): \"\"\" Check if an operation can be", "the rule name. :type NextToken: string :param NextToken: The token returned by a", "have both an EventPattern and a ScheduleExpression, in which case the rule triggers", "nothing from the event itself is passed to the target. You must use", "to the target. KinesisParameters (dict) --The custom parameter you can use to control", "notation, not bracket notation. 
(string) -- (string) -- InputTemplate (string) -- [REQUIRED]Input template", "API Documentation :example: response = client.describe_rule( Name='string' ) :type Name: string :param Name:", ") :type Rule: string :param Rule: [REQUIRED] The name of the rule. :type", "dot notation, not bracket notation. For more information about JSON paths, see JSONPath", "sent to the target. KinesisParameters (dict) --The custom parameter you can use to", "Rule (string) -- [REQUIRED] The name of the rule. Targets (list) -- [REQUIRED]", "resources associated with the event, and so on. (dict) --Represents an event to", "response = client.test_event_pattern( EventPattern='string', Event='string' ) :type EventPattern: string :param EventPattern: [REQUIRED] The", "RoleARN argument in PutTarget . For more information, see Authentication and Access Control", "take effect. A rule must contain at least an EventPattern or ScheduleExpression. Rules", "updated targets might not be immediately invoked. Please allow a short period of", "keys from InputPathsMap to customize the data sent to the target. KinesisParameters (dict)", "be present. (string) -- DetailType (string) --Free-form string used to decide what fields", "are enabled by default, or based on value of the state. You can", "* ? *)', 'rate(5 minutes)'. :type EventPattern: string :param EventPattern: The event pattern.", "No constraints are imposed on its contents. :rtype: dict :return: { 'FailedEntryCount': 123,", "The name of the rule. :type Targets: list :param Targets: [REQUIRED] The targets", "def describe_rule(Name=None): \"\"\" Describes the specified rule. See also: AWS API Documentation :example:", "--Free-form string used to decide what fields to expect in the event detail.", "1, 1), 'Source': 'string', 'Resources': [ 'string', ], 'DetailType': 'string', 'Detail': 'string' },", "or InputTransformer , you must use JSON dot notation, not bracket notation. When", "not bracket notation. (string) -- (string) -- InputTemplate (string) -- [REQUIRED]Input template where", "string :param ScheduleExpression: The scheduling expression. For example, 'cron(0 20 * * ?", "], 'DetailType': 'string', 'Detail': 'string' }, ] ) :type Entries: list :param Entries:", "operation name. This is the same name as the method name on the", "Command, and built-in targets. Id (string) -- [REQUIRED]The ID of the target. Arn", "API Documentation :example: response = client.put_events( Entries=[ { 'Time': datetime(2015, 1, 1), 'Source':", "are triggered when a matching event is observed. Rules with ScheduleExpressions self-trigger based", "name of the rule. \"\"\" pass def describe_rule(Name=None): \"\"\" Describes the specified rule.", "the error code. See also: AWS API Documentation :example: response = client.put_targets( Rule='string',", "be paginated, you can use the call client.get_paginator('create_foo'). :rtype: L{botocore.paginate.Paginator} \"\"\" pass def", "string :param State: Indicates whether the rule is enabled or disabled. :type Description:", "extracted from the event and used as the partition key. For more information,", "effect. See also: AWS API Documentation :example: response = client.disable_rule( Name='string' ) :type", "portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF", "an Amazon Kinesis stream. If you do not include this parameter, the default", "When you remove a target, when the associated rule triggers, removed targets might", "in PutTarget . 
'''
The MIT License (MIT)

Copyright (c) 2016 WavyCloud

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''

def can_paginate(operation_name=None):
    """Check if an operation can be paginated. operation_name is the same name
    as the corresponding client method in the method's model."""
    pass

def delete_rule(Name=None):
    """Deletes the specified rule. You must remove all targets from a rule before
    you can delete the rule. When you delete a rule, incoming events might continue
    to match to the deleted rule; allow a short period of time for changes to take
    effect.
    :example: response = client.delete_rule(Name='string')
    """
    pass

def describe_rule(Name=None):
    """Describes the specified rule.
    :return: {'Name': 'string', 'Arn': 'string', 'EventPattern': 'string',
              'ScheduleExpression': 'string', 'State': ..., ...}
    """
    pass

def disable_rule(Name=None):
    """Disables the specified rule. A disabled rule won't match any events and won't
    self-trigger if it has a schedule expression. When you disable a rule, incoming
    events might continue to match to the disabled rule; allow a short period of
    time for changes to take effect.
    """
    pass

def enable_rule(Name=None):
    """Enables the specified rule. When you enable a rule, incoming events might not
    immediately start matching to a newly enabled rule; allow a short period of time
    for changes to take effect.
    """
    pass

def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None):
    """Generate a presigned url given a client, its method, and arguments.
    Params are the parameters normally passed to ClientMethod, ExpiresIn is the
    number of seconds the presigned url is valid for (default an hour, 3600 seconds),
    and HttpMethod is the http method to use on the generated url.
    """
    pass

def get_paginator(operation_name=None):
    """Create a paginator for an operation."""
    pass

def list_rule_names_by_target(TargetArn=None, NextToken=None, Limit=None):
    """Lists the rules for the specified target, identified by its Amazon Resource
    Name (ARN).
    :return: {'RuleNames': ['string', ...], 'NextToken': 'string'}
    """
    pass

def list_rules(NamePrefix=None, NextToken=None, Limit=None):
    """Lists your Amazon CloudWatch Events rules. You can either list all the rules
    or you can provide a prefix to match to the rule names.
    :example: response = client.list_rules(NamePrefix='string', NextToken='string', Limit=123)
    """
    pass

def list_targets_by_rule(Rule=None, NextToken=None, Limit=None):
    """Lists the targets assigned to the specified rule.
    :return: {'Targets': [{'Id': 'string', 'Arn': 'string', 'RoleArn': 'string',
              'Input': 'string', 'InputPath': 'string', 'InputTransformer': {...},
              'RunCommandParameters': {'RunCommandTargets': [{'Key': 'string',
              'Values': ['string', ...]}]}, 'EcsParameters': {'TaskDefinitionArn':
              'string', 'TaskCount': 123}}, ...], 'NextToken': 'string'}
    """
    pass

def put_events(Entries=None):
    """Sends custom events to Amazon CloudWatch Events so that they can be matched
    to rules. Each entry represents an event to be submitted and can carry Time
    (the timestamp of the event, per RFC3339), Source, Resources (AWS resources,
    identified by Amazon Resource Name (ARN), which the event primarily concerns),
    DetailType and Detail.
    :return: {'FailedEntryCount': 123, 'Entries': [{'EventId': 'string', ...}]}
    """
    pass

def put_rule(Name=None, ScheduleExpression=None, EventPattern=None, State=None,
             Description=None, RoleArn=None):
    """Creates or updates the specified rule. Rules with an EventPattern are
    triggered when a matching event is observed; rules with ScheduleExpressions
    self-trigger based on the given schedule. A rule must contain at least an
    EventPattern or a ScheduleExpression. For the pattern syntax, see Events and
    Event Patterns in the Amazon CloudWatch Events User Guide.
    :example: response = client.put_rule(Name='string', ScheduleExpression='string',
        EventPattern='string', State='ENABLED'|'DISABLED', Description='string',
        RoleArn='string')
    """
    pass

def put_targets(Rule=None, Targets=None):
    """Adds the specified targets to the specified rule, or updates the targets if
    they are already associated with the rule. Targets are the resources to be
    invoked when a rule is triggered; target types include EC2 instances, AWS Lambda
    functions, Amazon Kinesis streams, Amazon ECS tasks, AWS Step Functions state
    machines, and built-in targets (creating rules with built-in targets is
    supported only in the AWS Management Console). Each target has an Id and an Arn,
    and can carry a RoleArn (the ARN of the IAM role to be used for the target),
    Input, InputPath and InputTransformer settings that extract one or more parts of
    the matched event and use that data to send customized input to the target
    (JSON dot notation, not bracket notation; for more information about JSON paths,
    see JSONPath), RunCommandParameters describing the EC2 instances that are to be
    sent the command (one key per block, e.g. InstanceIds with a list of Amazon EC2
    instance IDs, or a tag key with a list of tag values), EcsParameters with the
    Amazon ECS task definition and task count to use if the event target is an
    Amazon ECS cluster, and, for an Amazon Kinesis stream, an optional setting for
    which shard to use. To be able to make API calls against the resources that you
    own, Amazon CloudWatch Events needs the appropriate permissions: for AWS Lambda
    and Amazon SNS resources, CloudWatch Events relies on resource-based policies;
    for EC2 instances, Amazon Kinesis streams and other targets it relies on the IAM
    role passed in RoleArn. For more information, see Authentication and Access
    Control in the Amazon CloudWatch Events User Guide.
    :return: {'FailedEntryCount': 123, 'FailedEntries': [{'TargetId': 'string',
              'ErrorCode': 'string', 'ErrorMessage': 'string'}]}
    """
    pass

def remove_targets(Rule=None, Ids=None):
    """Removes the specified targets (by ID) from the specified rule. When the rule
    is triggered, those targets are no longer invoked; allow a short period of time
    for changes to take effect.
    :return: {'FailedEntryCount': 123, 'FailedEntries': [{'TargetId': 'string',
              'ErrorCode': 'string', 'ErrorMessage': 'string'}]}
    """
    pass

def test_event_pattern(EventPattern=None, Event=None):
    """Tests whether the specified event pattern matches the provided event.
    :return: {'Result': True|False}
    """
    pass
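The stubs above only document the call shapes. Below is a minimal sketch of how the same operations are typically combined with the real boto3 client; the rule name, schedule expression, Lambda ARN and sample event are illustrative placeholders, not taken from the module.

import json
import boto3

events = boto3.client('events', region_name='us-east-1')

# Create (or update) a scheduled rule.
rule = events.put_rule(
    Name='nightly-report',                      # placeholder rule name
    ScheduleExpression='cron(0 2 * * ? *)',
    State='ENABLED',
    Description='Nightly report trigger',
)
print(rule['RuleArn'])

# Attach a Lambda function as the target of the rule.
events.put_targets(
    Rule='nightly-report',
    Targets=[{
        'Id': 'report-lambda',
        'Arn': 'arn:aws:lambda:us-east-1:123456789012:function:report',  # placeholder ARN
    }],
)

# Check whether a sample event would match an event pattern.
result = events.test_event_pattern(
    EventPattern=json.dumps({'source': ['aws.ec2']}),
    Event=json.dumps({
        'id': '7bf73129-1428-4cd3-a780-95db273d1602',
        'detail-type': 'EC2 Instance State-change Notification',
        'source': 'aws.ec2',
        'account': '123456789012',
        'time': '2015-11-11T21:29:54Z',
        'region': 'us-east-1',
        'resources': ['arn:aws:ec2:us-east-1:123456789012:instance/i-abcd1111'],
        'detail': {},
    }),
)
print(result['Result'])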
[ "'localhost' ##### parser = OptionParser() parser.add_option(\"-i\", \"--init\", dest=\"init\",help=\"partitioning init\",action=\"store_true\", default=False) (options, args) =", "'history_uint_sync':'daily', 'history_str':'daily', 'history_str_sync':'daily', 'history_log':'daily', 'history_text':'daily', 'trends':'monthly', 'trends_uint':'monthly', 'acknowledges':'monthly', 'alerts':'monthly', 'auditlog':'monthly', 'events':'monthly', 'service_alarms':'monthly', }", "= psycopg2.connect(database=db, user=db_user, password=<PASSWORD>,host=db_host) db_cursor = db_connection.cursor() for table_key, table_value in tables.iteritems(): db_cursor.execute('''select", "import psycopg2 from optparse import OptionParser tables = { 'history':'daily', 'history_sync':'daily', 'history_uint':'daily', 'history_uint_sync':'daily',", "} #change these settings db_user = 'zabbix' db_pw = '<PASSWORD>' db = 'zabbix'", "init = 0 db_connection = psycopg2.connect(database=db, user=db_user, password=<PASSWORD>,host=db_host) db_cursor = db_connection.cursor() for table_key,", "{ 'history':'daily', 'history_sync':'daily', 'history_uint':'daily', 'history_uint_sync':'daily', 'history_str':'daily', 'history_str_sync':'daily', 'history_log':'daily', 'history_text':'daily', 'trends':'monthly', 'trends_uint':'monthly', 'acknowledges':'monthly', 'alerts':'monthly',", "'acknowledges':'monthly', 'alerts':'monthly', 'auditlog':'monthly', 'events':'monthly', 'service_alarms':'monthly', } #change these settings db_user = 'zabbix' db_pw", "'history':'daily', 'history_sync':'daily', 'history_uint':'daily', 'history_uint_sync':'daily', 'history_str':'daily', 'history_str_sync':'daily', 'history_log':'daily', 'history_text':'daily', 'trends':'monthly', 'trends_uint':'monthly', 'acknowledges':'monthly', 'alerts':'monthly', 'auditlog':'monthly',", "'auditlog':'monthly', 'events':'monthly', 'service_alarms':'monthly', } #change these settings db_user = 'zabbix' db_pw = '<PASSWORD>'", "\"--init\", dest=\"init\",help=\"partitioning init\",action=\"store_true\", default=False) (options, args) = parser.parse_args() if options.init: init = 1", "'alerts':'monthly', 'auditlog':'monthly', 'events':'monthly', 'service_alarms':'monthly', } #change these settings db_user = 'zabbix' db_pw =", "0 db_connection = psycopg2.connect(database=db, user=db_user, password=<PASSWORD>,host=db_host) db_cursor = db_connection.cursor() for table_key, table_value in", "= OptionParser() parser.add_option(\"-i\", \"--init\", dest=\"init\",help=\"partitioning init\",action=\"store_true\", default=False) (options, args) = parser.parse_args() if options.init:", "'<PASSWORD>' db = 'zabbix' db_host = 'localhost' ##### parser = OptionParser() parser.add_option(\"-i\", \"--init\",", "import OptionParser tables = { 'history':'daily', 'history_sync':'daily', 'history_uint':'daily', 'history_uint_sync':'daily', 'history_str':'daily', 'history_str_sync':'daily', 'history_log':'daily', 'history_text':'daily',", "= { 'history':'daily', 'history_sync':'daily', 'history_uint':'daily', 'history_uint_sync':'daily', 'history_str':'daily', 'history_str_sync':'daily', 'history_log':'daily', 'history_text':'daily', 'trends':'monthly', 'trends_uint':'monthly', 'acknowledges':'monthly',", "<gh_stars>100-1000 #!/usr/bin/python import psycopg2 from optparse import OptionParser tables = { 'history':'daily', 'history_sync':'daily',", "= '<PASSWORD>' db = 'zabbix' db_host = 'localhost' ##### parser = OptionParser() parser.add_option(\"-i\",", "= 'localhost' ##### parser = OptionParser() 
parser.add_option(\"-i\", \"--init\", dest=\"init\",help=\"partitioning init\",action=\"store_true\", default=False) (options, args)", "options.init: init = 1 else: init = 0 db_connection = psycopg2.connect(database=db, user=db_user, password=<PASSWORD>,host=db_host)", "db = 'zabbix' db_host = 'localhost' ##### parser = OptionParser() parser.add_option(\"-i\", \"--init\", dest=\"init\",help=\"partitioning", "'history_sync':'daily', 'history_uint':'daily', 'history_uint_sync':'daily', 'history_str':'daily', 'history_str_sync':'daily', 'history_log':'daily', 'history_text':'daily', 'trends':'monthly', 'trends_uint':'monthly', 'acknowledges':'monthly', 'alerts':'monthly', 'auditlog':'monthly', 'events':'monthly',", "password=<PASSWORD>,host=db_host) db_cursor = db_connection.cursor() for table_key, table_value in tables.iteritems(): db_cursor.execute('''select create_zbx_partitions(%s,%s,%s)''',[table_key,table_value,init]) db_connection.commit() db_cursor.close()", "'history_text':'daily', 'trends':'monthly', 'trends_uint':'monthly', 'acknowledges':'monthly', 'alerts':'monthly', 'auditlog':'monthly', 'events':'monthly', 'service_alarms':'monthly', } #change these settings db_user", "parser.parse_args() if options.init: init = 1 else: init = 0 db_connection = psycopg2.connect(database=db,", "'service_alarms':'monthly', } #change these settings db_user = 'zabbix' db_pw = '<PASSWORD>' db =", "OptionParser tables = { 'history':'daily', 'history_sync':'daily', 'history_uint':'daily', 'history_uint_sync':'daily', 'history_str':'daily', 'history_str_sync':'daily', 'history_log':'daily', 'history_text':'daily', 'trends':'monthly',", "dest=\"init\",help=\"partitioning init\",action=\"store_true\", default=False) (options, args) = parser.parse_args() if options.init: init = 1 else:", "tables = { 'history':'daily', 'history_sync':'daily', 'history_uint':'daily', 'history_uint_sync':'daily', 'history_str':'daily', 'history_str_sync':'daily', 'history_log':'daily', 'history_text':'daily', 'trends':'monthly', 'trends_uint':'monthly',", "db_pw = '<PASSWORD>' db = 'zabbix' db_host = 'localhost' ##### parser = OptionParser()", "'history_str':'daily', 'history_str_sync':'daily', 'history_log':'daily', 'history_text':'daily', 'trends':'monthly', 'trends_uint':'monthly', 'acknowledges':'monthly', 'alerts':'monthly', 'auditlog':'monthly', 'events':'monthly', 'service_alarms':'monthly', } #change", "'history_uint':'daily', 'history_uint_sync':'daily', 'history_str':'daily', 'history_str_sync':'daily', 'history_log':'daily', 'history_text':'daily', 'trends':'monthly', 'trends_uint':'monthly', 'acknowledges':'monthly', 'alerts':'monthly', 'auditlog':'monthly', 'events':'monthly', 'service_alarms':'monthly',", "##### parser = OptionParser() parser.add_option(\"-i\", \"--init\", dest=\"init\",help=\"partitioning init\",action=\"store_true\", default=False) (options, args) = parser.parse_args()", "OptionParser() parser.add_option(\"-i\", \"--init\", dest=\"init\",help=\"partitioning init\",action=\"store_true\", default=False) (options, args) = parser.parse_args() if options.init: init", "= 1 else: init = 0 db_connection = psycopg2.connect(database=db, user=db_user, password=<PASSWORD>,host=db_host) db_cursor =", "= parser.parse_args() if options.init: init = 1 else: init = 0 db_connection =", "else: init = 0 db_connection = psycopg2.connect(database=db, user=db_user, password=<PASSWORD>,host=db_host) db_cursor = db_connection.cursor() for", "'history_log':'daily', 'history_text':'daily', 'trends':'monthly', 
'trends_uint':'monthly', 'acknowledges':'monthly', 'alerts':'monthly', 'auditlog':'monthly', 'events':'monthly', 'service_alarms':'monthly', } #change these settings", "psycopg2 from optparse import OptionParser tables = { 'history':'daily', 'history_sync':'daily', 'history_uint':'daily', 'history_uint_sync':'daily', 'history_str':'daily',", "'events':'monthly', 'service_alarms':'monthly', } #change these settings db_user = 'zabbix' db_pw = '<PASSWORD>' db", "'zabbix' db_host = 'localhost' ##### parser = OptionParser() parser.add_option(\"-i\", \"--init\", dest=\"init\",help=\"partitioning init\",action=\"store_true\", default=False)", "(options, args) = parser.parse_args() if options.init: init = 1 else: init = 0", "'trends_uint':'monthly', 'acknowledges':'monthly', 'alerts':'monthly', 'auditlog':'monthly', 'events':'monthly', 'service_alarms':'monthly', } #change these settings db_user = 'zabbix'", "if options.init: init = 1 else: init = 0 db_connection = psycopg2.connect(database=db, user=db_user,", "= 0 db_connection = psycopg2.connect(database=db, user=db_user, password=<PASSWORD>,host=db_host) db_cursor = db_connection.cursor() for table_key, table_value", "'zabbix' db_pw = '<PASSWORD>' db = 'zabbix' db_host = 'localhost' ##### parser =", "'history_str_sync':'daily', 'history_log':'daily', 'history_text':'daily', 'trends':'monthly', 'trends_uint':'monthly', 'acknowledges':'monthly', 'alerts':'monthly', 'auditlog':'monthly', 'events':'monthly', 'service_alarms':'monthly', } #change these", "args) = parser.parse_args() if options.init: init = 1 else: init = 0 db_connection", "init\",action=\"store_true\", default=False) (options, args) = parser.parse_args() if options.init: init = 1 else: init", "#change these settings db_user = 'zabbix' db_pw = '<PASSWORD>' db = 'zabbix' db_host", "settings db_user = 'zabbix' db_pw = '<PASSWORD>' db = 'zabbix' db_host = 'localhost'", "from optparse import OptionParser tables = { 'history':'daily', 'history_sync':'daily', 'history_uint':'daily', 'history_uint_sync':'daily', 'history_str':'daily', 'history_str_sync':'daily',", "parser = OptionParser() parser.add_option(\"-i\", \"--init\", dest=\"init\",help=\"partitioning init\",action=\"store_true\", default=False) (options, args) = parser.parse_args() if", "db_connection = psycopg2.connect(database=db, user=db_user, password=<PASSWORD>,host=db_host) db_cursor = db_connection.cursor() for table_key, table_value in tables.iteritems():", "db_host = 'localhost' ##### parser = OptionParser() parser.add_option(\"-i\", \"--init\", dest=\"init\",help=\"partitioning init\",action=\"store_true\", default=False) (options,", "'trends':'monthly', 'trends_uint':'monthly', 'acknowledges':'monthly', 'alerts':'monthly', 'auditlog':'monthly', 'events':'monthly', 'service_alarms':'monthly', } #change these settings db_user =", "db_cursor = db_connection.cursor() for table_key, table_value in tables.iteritems(): db_cursor.execute('''select create_zbx_partitions(%s,%s,%s)''',[table_key,table_value,init]) db_connection.commit() db_cursor.close() db_connection.close()", "= 'zabbix' db_host = 'localhost' ##### parser = OptionParser() parser.add_option(\"-i\", \"--init\", dest=\"init\",help=\"partitioning init\",action=\"store_true\",", "user=db_user, password=<PASSWORD>,host=db_host) db_cursor = db_connection.cursor() for table_key, table_value in tables.iteritems(): db_cursor.execute('''select create_zbx_partitions(%s,%s,%s)''',[table_key,table_value,init]) db_connection.commit()", "these settings db_user = 'zabbix' db_pw = 
'<PASSWORD>' db = 'zabbix' db_host =", "parser.add_option(\"-i\", \"--init\", dest=\"init\",help=\"partitioning init\",action=\"store_true\", default=False) (options, args) = parser.parse_args() if options.init: init =", "psycopg2.connect(database=db, user=db_user, password=<PASSWORD>,host=db_host) db_cursor = db_connection.cursor() for table_key, table_value in tables.iteritems(): db_cursor.execute('''select create_zbx_partitions(%s,%s,%s)''',[table_key,table_value,init])", "default=False) (options, args) = parser.parse_args() if options.init: init = 1 else: init =", "1 else: init = 0 db_connection = psycopg2.connect(database=db, user=db_user, password=<PASSWORD>,host=db_host) db_cursor = db_connection.cursor()", "#!/usr/bin/python import psycopg2 from optparse import OptionParser tables = { 'history':'daily', 'history_sync':'daily', 'history_uint':'daily',", "= 'zabbix' db_pw = '<PASSWORD>' db = 'zabbix' db_host = 'localhost' ##### parser", "optparse import OptionParser tables = { 'history':'daily', 'history_sync':'daily', 'history_uint':'daily', 'history_uint_sync':'daily', 'history_str':'daily', 'history_str_sync':'daily', 'history_log':'daily',", "db_user = 'zabbix' db_pw = '<PASSWORD>' db = 'zabbix' db_host = 'localhost' #####", "init = 1 else: init = 0 db_connection = psycopg2.connect(database=db, user=db_user, password=<PASSWORD>,host=db_host) db_cursor" ]
[ "if x == 0: args.start = date.today().replace(day = 1) elif x < 0:", "description='Fetch cost explorer data from AWS and display and/or save it', usage='%(prog)s [options]',", "parser.add_argument('--group-by', type=json.loads, help='JSON group_by expression (see AWS documentation)') parser.add_argument('--display', action='store_true', help='Display (truncated) output", "= x) except: pass return args def main(): args = parse_args() if not", "= args.granularity, filter = args.filter, group_by = args.group_by, metrics = args.metrics ).to_df() if", "or HOURLY (untested)') parser.add_argument('--filter', type=json.loads, help='JSON filter expression (see AWS documentation)') parser.add_argument('--metrics', type=json.loads,", "help='Granularity, MONTHLY, DAILY or HOURLY (untested)') parser.add_argument('--filter', type=json.loads, help='JSON filter expression (see AWS", "AWS documentation)') parser.add_argument('--metrics', type=json.loads, default=['UnblendedCost'], help='JSON metrics expression, eg \\'[ \"UnblendedCost\", \"NetUnblendedCost\"]\\'') parser.add_argument('--group-by',", "main(): args = parse_args() if not args.display and not args.out: raise Exception('Not showing", "month') parser.add_argument('--end', help='End date') parser.add_argument('--granularity', default='DAILY', help='Granularity, MONTHLY, DAILY or HOURLY (untested)') parser.add_argument('--filter',", "x == 0: args.start = date.today().replace(day = 1) elif x < 0: args.start", ").to_df() if args.display: print('Converted:') pprint(converted) print('') if args.out: converted.to_csv(path_or_buf = args.out, index =", "= 1) elif x < 0: args.start = date.today() + timedelta(days = x)", "x = int(args.start) if x == 0: args.start = date.today().replace(day = 1) elif", "args.metrics ).to_df() if args.display: print('Converted:') pprint(converted) print('') if args.out: converted.to_csv(path_or_buf = args.out, index", "= argparse.ArgumentParser( description='Fetch cost explorer data from AWS and display and/or save it',", "table') parser.add_argument('--out', help='File to store CSV in (not stored if not specified') args", "in (not stored if not specified') args = parser.parse_args() # Handle special cases", "args.display and not args.out: raise Exception('Not showing or saving output, no reason to", "data from AWS and display and/or save it', usage='%(prog)s [options]', epilog='Standard environment variables", "args.end, granularity = args.granularity, filter = args.filter, group_by = args.group_by, metrics = args.metrics", "raise Exception('Not showing or saving output, no reason to run') client = boto3.client('ce',", "parser.add_argument('--granularity', default='DAILY', help='Granularity, MONTHLY, DAILY or HOURLY (untested)') parser.add_argument('--filter', type=json.loads, help='JSON filter expression", "parser.add_argument('--out', help='File to store CSV in (not stored if not specified') args =", "aws_cost_explorer_converter.CostExplorerConverter( client, start = args.start, end = args.end, granularity = args.granularity, filter =", "args.out, index = False, encoding = 'utf-8') print('Wrote csv to %s' % (args.out))", "import timedelta, date from pprint import pprint import aws_cost_explorer_converter def parse_args(): parser =", "current month') parser.add_argument('--end', help='End date') parser.add_argument('--granularity', default='DAILY', help='Granularity, MONTHLY, DAILY or HOURLY (untested)')", "def parse_args(): parser = argparse.ArgumentParser( description='Fetch cost explorer data from AWS and display", 
"if not args.display and not args.out: raise Exception('Not showing or saving output, no", "showing or saving output, no reason to run') client = boto3.client('ce', region_name='us-east-1') converted", "= args.out, index = False, encoding = 'utf-8') print('Wrote csv to %s' %", "granularity = args.granularity, filter = args.filter, group_by = args.group_by, metrics = args.metrics ).to_df()", "elif x < 0: args.start = date.today() + timedelta(days = x) except: pass", "AWS and display and/or save it', usage='%(prog)s [options]', epilog='Standard environment variables for AWS", "timedelta(days = x) except: pass return args def main(): args = parse_args() if", "environment variables for AWS connection information are supported' ) global args parser.add_argument('--start', help='Start", "output, no reason to run') client = boto3.client('ce', region_name='us-east-1') converted = aws_cost_explorer_converter.CostExplorerConverter( client,", "client, start = args.start, end = args.end, granularity = args.granularity, filter = args.filter,", "cases of start try: x = int(args.start) if x == 0: args.start =", "= args.start, end = args.end, granularity = args.granularity, filter = args.filter, group_by =", "default=['UnblendedCost'], help='JSON metrics expression, eg \\'[ \"UnblendedCost\", \"NetUnblendedCost\"]\\'') parser.add_argument('--group-by', type=json.loads, help='JSON group_by expression", "pass return args def main(): args = parse_args() if not args.display and not", "special cases of start try: x = int(args.start) if x == 0: args.start", "type=json.loads, help='JSON group_by expression (see AWS documentation)') parser.add_argument('--display', action='store_true', help='Display (truncated) output table')", "args def main(): args = parse_args() if not args.display and not args.out: raise", "= args.group_by, metrics = args.metrics ).to_df() if args.display: print('Converted:') pprint(converted) print('') if args.out:", "json from datetime import timedelta, date from pprint import pprint import aws_cost_explorer_converter def", "Exception('Not showing or saving output, no reason to run') client = boto3.client('ce', region_name='us-east-1')", "if args.out: converted.to_csv(path_or_buf = args.out, index = False, encoding = 'utf-8') print('Wrote csv", "client = boto3.client('ce', region_name='us-east-1') converted = aws_cost_explorer_converter.CostExplorerConverter( client, start = args.start, end =", "(truncated) output table') parser.add_argument('--out', help='File to store CSV in (not stored if not", "return args def main(): args = parse_args() if not args.display and not args.out:", "date; if a negative number, is taken as a delta from today; if", "not args.out: raise Exception('Not showing or saving output, no reason to run') client", "run') client = boto3.client('ce', region_name='us-east-1') converted = aws_cost_explorer_converter.CostExplorerConverter( client, start = args.start, end", "date.today() + timedelta(days = x) except: pass return args def main(): args =", "= aws_cost_explorer_converter.CostExplorerConverter( client, start = args.start, end = args.end, granularity = args.granularity, filter", "def main(): args = parse_args() if not args.display and not args.out: raise Exception('Not", "= boto3.client('ce', region_name='us-east-1') converted = aws_cost_explorer_converter.CostExplorerConverter( client, start = args.start, end = args.end,", "\\'[ \"UnblendedCost\", \"NetUnblendedCost\"]\\'') parser.add_argument('--group-by', type=json.loads, help='JSON group_by expression (see AWS 
documentation)') parser.add_argument('--display', action='store_true',", "or saving output, no reason to run') client = boto3.client('ce', region_name='us-east-1') converted =", "saving output, no reason to run') client = boto3.client('ce', region_name='us-east-1') converted = aws_cost_explorer_converter.CostExplorerConverter(", "if a negative number, is taken as a delta from today; if zero,", "filter expression (see AWS documentation)') parser.add_argument('--metrics', type=json.loads, default=['UnblendedCost'], help='JSON metrics expression, eg \\'[", "date from pprint import pprint import aws_cost_explorer_converter def parse_args(): parser = argparse.ArgumentParser( description='Fetch", "parser.add_argument('--filter', type=json.loads, help='JSON filter expression (see AWS documentation)') parser.add_argument('--metrics', type=json.loads, default=['UnblendedCost'], help='JSON metrics", "AWS documentation)') parser.add_argument('--display', action='store_true', help='Display (truncated) output table') parser.add_argument('--out', help='File to store CSV", "region_name='us-east-1') converted = aws_cost_explorer_converter.CostExplorerConverter( client, start = args.start, end = args.end, granularity =", "= args.filter, group_by = args.group_by, metrics = args.metrics ).to_df() if args.display: print('Converted:') pprint(converted)", "boto3.client('ce', region_name='us-east-1') converted = aws_cost_explorer_converter.CostExplorerConverter( client, start = args.start, end = args.end, granularity", "expression (see AWS documentation)') parser.add_argument('--metrics', type=json.loads, default=['UnblendedCost'], help='JSON metrics expression, eg \\'[ \"UnblendedCost\",", "filter = args.filter, group_by = args.group_by, metrics = args.metrics ).to_df() if args.display: print('Converted:')", "print('Converted:') pprint(converted) print('') if args.out: converted.to_csv(path_or_buf = args.out, index = False, encoding =", "import argparse import json from datetime import timedelta, date from pprint import pprint", "as a delta from today; if zero, then as the start of the", "converted = aws_cost_explorer_converter.CostExplorerConverter( client, start = args.start, end = args.end, granularity = args.granularity,", "pprint import pprint import aws_cost_explorer_converter def parse_args(): parser = argparse.ArgumentParser( description='Fetch cost explorer", "import pprint import aws_cost_explorer_converter def parse_args(): parser = argparse.ArgumentParser( description='Fetch cost explorer data", "supported' ) global args parser.add_argument('--start', help='Start date; if a negative number, is taken", "specified') args = parser.parse_args() # Handle special cases of start try: x =", "parse_args() if not args.display and not args.out: raise Exception('Not showing or saving output,", "metrics expression, eg \\'[ \"UnblendedCost\", \"NetUnblendedCost\"]\\'') parser.add_argument('--group-by', type=json.loads, help='JSON group_by expression (see AWS", "help='JSON group_by expression (see AWS documentation)') parser.add_argument('--display', action='store_true', help='Display (truncated) output table') parser.add_argument('--out',", "= args.metrics ).to_df() if args.display: print('Converted:') pprint(converted) print('') if args.out: converted.to_csv(path_or_buf = args.out,", "date.today().replace(day = 1) elif x < 0: args.start = date.today() + timedelta(days =", "expression, eg \\'[ \"UnblendedCost\", \"NetUnblendedCost\"]\\'') parser.add_argument('--group-by', type=json.loads, help='JSON group_by 
expression (see AWS documentation)')", "args.start = date.today().replace(day = 1) elif x < 0: args.start = date.today() +", "number, is taken as a delta from today; if zero, then as the", "DAILY or HOURLY (untested)') parser.add_argument('--filter', type=json.loads, help='JSON filter expression (see AWS documentation)') parser.add_argument('--metrics',", "+ timedelta(days = x) except: pass return args def main(): args = parse_args()", "timedelta, date from pprint import pprint import aws_cost_explorer_converter def parse_args(): parser = argparse.ArgumentParser(", "(see AWS documentation)') parser.add_argument('--display', action='store_true', help='Display (truncated) output table') parser.add_argument('--out', help='File to store", "1) elif x < 0: args.start = date.today() + timedelta(days = x) except:", "# Handle special cases of start try: x = int(args.start) if x ==", "connection information are supported' ) global args parser.add_argument('--start', help='Start date; if a negative", "type=json.loads, default=['UnblendedCost'], help='JSON metrics expression, eg \\'[ \"UnblendedCost\", \"NetUnblendedCost\"]\\'') parser.add_argument('--group-by', type=json.loads, help='JSON group_by", "action='store_true', help='Display (truncated) output table') parser.add_argument('--out', help='File to store CSV in (not stored", "then as the start of the current month') parser.add_argument('--end', help='End date') parser.add_argument('--granularity', default='DAILY',", "zero, then as the start of the current month') parser.add_argument('--end', help='End date') parser.add_argument('--granularity',", "group_by expression (see AWS documentation)') parser.add_argument('--display', action='store_true', help='Display (truncated) output table') parser.add_argument('--out', help='File", "documentation)') parser.add_argument('--metrics', type=json.loads, default=['UnblendedCost'], help='JSON metrics expression, eg \\'[ \"UnblendedCost\", \"NetUnblendedCost\"]\\'') parser.add_argument('--group-by', type=json.loads,", "type=json.loads, help='JSON filter expression (see AWS documentation)') parser.add_argument('--metrics', type=json.loads, default=['UnblendedCost'], help='JSON metrics expression,", "if not specified') args = parser.parse_args() # Handle special cases of start try:", "= parse_args() if not args.display and not args.out: raise Exception('Not showing or saving", "parser.add_argument('--start', help='Start date; if a negative number, is taken as a delta from", "(not stored if not specified') args = parser.parse_args() # Handle special cases of", "usage='%(prog)s [options]', epilog='Standard environment variables for AWS connection information are supported' ) global", "documentation)') parser.add_argument('--display', action='store_true', help='Display (truncated) output table') parser.add_argument('--out', help='File to store CSV in", "group_by = args.group_by, metrics = args.metrics ).to_df() if args.display: print('Converted:') pprint(converted) print('') if", "pprint import aws_cost_explorer_converter def parse_args(): parser = argparse.ArgumentParser( description='Fetch cost explorer data from", "import aws_cost_explorer_converter def parse_args(): parser = argparse.ArgumentParser( description='Fetch cost explorer data from AWS", "help='JSON metrics expression, eg \\'[ \"UnblendedCost\", \"NetUnblendedCost\"]\\'') parser.add_argument('--group-by', type=json.loads, help='JSON group_by expression (see", "import json from datetime import timedelta, date from pprint import pprint import 
aws_cost_explorer_converter", "are supported' ) global args parser.add_argument('--start', help='Start date; if a negative number, is", "variables for AWS connection information are supported' ) global args parser.add_argument('--start', help='Start date;", "(untested)') parser.add_argument('--filter', type=json.loads, help='JSON filter expression (see AWS documentation)') parser.add_argument('--metrics', type=json.loads, default=['UnblendedCost'], help='JSON", "HOURLY (untested)') parser.add_argument('--filter', type=json.loads, help='JSON filter expression (see AWS documentation)') parser.add_argument('--metrics', type=json.loads, default=['UnblendedCost'],", "from today; if zero, then as the start of the current month') parser.add_argument('--end',", "store CSV in (not stored if not specified') args = parser.parse_args() # Handle", "reason to run') client = boto3.client('ce', region_name='us-east-1') converted = aws_cost_explorer_converter.CostExplorerConverter( client, start =", "start = args.start, end = args.end, granularity = args.granularity, filter = args.filter, group_by", "datetime import timedelta, date from pprint import pprint import aws_cost_explorer_converter def parse_args(): parser", "and display and/or save it', usage='%(prog)s [options]', epilog='Standard environment variables for AWS connection", "as the start of the current month') parser.add_argument('--end', help='End date') parser.add_argument('--granularity', default='DAILY', help='Granularity,", "stored if not specified') args = parser.parse_args() # Handle special cases of start", "output table') parser.add_argument('--out', help='File to store CSV in (not stored if not specified')", "not specified') args = parser.parse_args() # Handle special cases of start try: x", "epilog='Standard environment variables for AWS connection information are supported' ) global args parser.add_argument('--start',", "global args parser.add_argument('--start', help='Start date; if a negative number, is taken as a", "MONTHLY, DAILY or HOURLY (untested)') parser.add_argument('--filter', type=json.loads, help='JSON filter expression (see AWS documentation)')", "\"UnblendedCost\", \"NetUnblendedCost\"]\\'') parser.add_argument('--group-by', type=json.loads, help='JSON group_by expression (see AWS documentation)') parser.add_argument('--display', action='store_true', help='Display", "args = parser.parse_args() # Handle special cases of start try: x = int(args.start)", "no reason to run') client = boto3.client('ce', region_name='us-east-1') converted = aws_cost_explorer_converter.CostExplorerConverter( client, start", "the start of the current month') parser.add_argument('--end', help='End date') parser.add_argument('--granularity', default='DAILY', help='Granularity, MONTHLY,", "except: pass return args def main(): args = parse_args() if not args.display and", "taken as a delta from today; if zero, then as the start of", "0: args.start = date.today() + timedelta(days = x) except: pass return args def", "display and/or save it', usage='%(prog)s [options]', epilog='Standard environment variables for AWS connection information", "import boto3 import argparse import json from datetime import timedelta, date from pprint", "pprint(converted) print('') if args.out: converted.to_csv(path_or_buf = args.out, index = False, encoding = 'utf-8')", "< 0: args.start = date.today() + timedelta(days = x) except: pass return args", "to store CSV in (not stored if not specified') args = parser.parse_args() #", "from pprint import pprint import 
aws_cost_explorer_converter def parse_args(): parser = argparse.ArgumentParser( description='Fetch cost", "of the current month') parser.add_argument('--end', help='End date') parser.add_argument('--granularity', default='DAILY', help='Granularity, MONTHLY, DAILY or", "x < 0: args.start = date.today() + timedelta(days = x) except: pass return", "parser.add_argument('--display', action='store_true', help='Display (truncated) output table') parser.add_argument('--out', help='File to store CSV in (not", "args.display: print('Converted:') pprint(converted) print('') if args.out: converted.to_csv(path_or_buf = args.out, index = False, encoding", "cost explorer data from AWS and display and/or save it', usage='%(prog)s [options]', epilog='Standard", "is taken as a delta from today; if zero, then as the start", "help='File to store CSV in (not stored if not specified') args = parser.parse_args()", "args.out: converted.to_csv(path_or_buf = args.out, index = False, encoding = 'utf-8') print('Wrote csv to", "default='DAILY', help='Granularity, MONTHLY, DAILY or HOURLY (untested)') parser.add_argument('--filter', type=json.loads, help='JSON filter expression (see", "expression (see AWS documentation)') parser.add_argument('--display', action='store_true', help='Display (truncated) output table') parser.add_argument('--out', help='File to", "argparse import json from datetime import timedelta, date from pprint import pprint import", "and/or save it', usage='%(prog)s [options]', epilog='Standard environment variables for AWS connection information are", "boto3 import argparse import json from datetime import timedelta, date from pprint import", "explorer data from AWS and display and/or save it', usage='%(prog)s [options]', epilog='Standard environment", "args.granularity, filter = args.filter, group_by = args.group_by, metrics = args.metrics ).to_df() if args.display:", "parser.add_argument('--end', help='End date') parser.add_argument('--granularity', default='DAILY', help='Granularity, MONTHLY, DAILY or HOURLY (untested)') parser.add_argument('--filter', type=json.loads,", "help='Display (truncated) output table') parser.add_argument('--out', help='File to store CSV in (not stored if", "= parser.parse_args() # Handle special cases of start try: x = int(args.start) if", "args.group_by, metrics = args.metrics ).to_df() if args.display: print('Converted:') pprint(converted) print('') if args.out: converted.to_csv(path_or_buf", "int(args.start) if x == 0: args.start = date.today().replace(day = 1) elif x <", "[options]', epilog='Standard environment variables for AWS connection information are supported' ) global args", "help='Start date; if a negative number, is taken as a delta from today;", "if zero, then as the start of the current month') parser.add_argument('--end', help='End date')", "metrics = args.metrics ).to_df() if args.display: print('Converted:') pprint(converted) print('') if args.out: converted.to_csv(path_or_buf =", "and not args.out: raise Exception('Not showing or saving output, no reason to run')", "= args.end, granularity = args.granularity, filter = args.filter, group_by = args.group_by, metrics =", "the current month') parser.add_argument('--end', help='End date') parser.add_argument('--granularity', default='DAILY', help='Granularity, MONTHLY, DAILY or HOURLY", "x) except: pass return args def main(): args = parse_args() if not args.display", "not args.display and not args.out: raise Exception('Not showing or saving output, no reason", "a negative number, is taken as a delta from 
today; if zero, then", "Handle special cases of start try: x = int(args.start) if x == 0:", "args.out: raise Exception('Not showing or saving output, no reason to run') client =", "to run') client = boto3.client('ce', region_name='us-east-1') converted = aws_cost_explorer_converter.CostExplorerConverter( client, start = args.start,", "aws_cost_explorer_converter def parse_args(): parser = argparse.ArgumentParser( description='Fetch cost explorer data from AWS and", "= date.today().replace(day = 1) elif x < 0: args.start = date.today() + timedelta(days", "args.start = date.today() + timedelta(days = x) except: pass return args def main():", "save it', usage='%(prog)s [options]', epilog='Standard environment variables for AWS connection information are supported'", "(see AWS documentation)') parser.add_argument('--metrics', type=json.loads, default=['UnblendedCost'], help='JSON metrics expression, eg \\'[ \"UnblendedCost\", \"NetUnblendedCost\"]\\'')", "= int(args.start) if x == 0: args.start = date.today().replace(day = 1) elif x", "today; if zero, then as the start of the current month') parser.add_argument('--end', help='End", "CSV in (not stored if not specified') args = parser.parse_args() # Handle special", "0: args.start = date.today().replace(day = 1) elif x < 0: args.start = date.today()", "information are supported' ) global args parser.add_argument('--start', help='Start date; if a negative number,", "date') parser.add_argument('--granularity', default='DAILY', help='Granularity, MONTHLY, DAILY or HOURLY (untested)') parser.add_argument('--filter', type=json.loads, help='JSON filter", "parse_args(): parser = argparse.ArgumentParser( description='Fetch cost explorer data from AWS and display and/or", "for AWS connection information are supported' ) global args parser.add_argument('--start', help='Start date; if", "help='End date') parser.add_argument('--granularity', default='DAILY', help='Granularity, MONTHLY, DAILY or HOURLY (untested)') parser.add_argument('--filter', type=json.loads, help='JSON", "args.filter, group_by = args.group_by, metrics = args.metrics ).to_df() if args.display: print('Converted:') pprint(converted) print('')", "from AWS and display and/or save it', usage='%(prog)s [options]', epilog='Standard environment variables for", ") global args parser.add_argument('--start', help='Start date; if a negative number, is taken as", "args.start, end = args.end, granularity = args.granularity, filter = args.filter, group_by = args.group_by,", "AWS connection information are supported' ) global args parser.add_argument('--start', help='Start date; if a", "a delta from today; if zero, then as the start of the current", "argparse.ArgumentParser( description='Fetch cost explorer data from AWS and display and/or save it', usage='%(prog)s", "args = parse_args() if not args.display and not args.out: raise Exception('Not showing or", "of start try: x = int(args.start) if x == 0: args.start = date.today().replace(day", "negative number, is taken as a delta from today; if zero, then as", "\"NetUnblendedCost\"]\\'') parser.add_argument('--group-by', type=json.loads, help='JSON group_by expression (see AWS documentation)') parser.add_argument('--display', action='store_true', help='Display (truncated)", "= date.today() + timedelta(days = x) except: pass return args def main(): args", "parser.add_argument('--metrics', type=json.loads, default=['UnblendedCost'], help='JSON metrics expression, eg \\'[ \"UnblendedCost\", \"NetUnblendedCost\"]\\'') parser.add_argument('--group-by', 
type=json.loads, help='JSON", "it', usage='%(prog)s [options]', epilog='Standard environment variables for AWS connection information are supported' )", "== 0: args.start = date.today().replace(day = 1) elif x < 0: args.start =", "end = args.end, granularity = args.granularity, filter = args.filter, group_by = args.group_by, metrics", "delta from today; if zero, then as the start of the current month')", "from datetime import timedelta, date from pprint import pprint import aws_cost_explorer_converter def parse_args():", "parser = argparse.ArgumentParser( description='Fetch cost explorer data from AWS and display and/or save", "start of the current month') parser.add_argument('--end', help='End date') parser.add_argument('--granularity', default='DAILY', help='Granularity, MONTHLY, DAILY", "parser.parse_args() # Handle special cases of start try: x = int(args.start) if x", "args parser.add_argument('--start', help='Start date; if a negative number, is taken as a delta", "converted.to_csv(path_or_buf = args.out, index = False, encoding = 'utf-8') print('Wrote csv to %s'", "print('') if args.out: converted.to_csv(path_or_buf = args.out, index = False, encoding = 'utf-8') print('Wrote", "help='JSON filter expression (see AWS documentation)') parser.add_argument('--metrics', type=json.loads, default=['UnblendedCost'], help='JSON metrics expression, eg", "if args.display: print('Converted:') pprint(converted) print('') if args.out: converted.to_csv(path_or_buf = args.out, index = False,", "eg \\'[ \"UnblendedCost\", \"NetUnblendedCost\"]\\'') parser.add_argument('--group-by', type=json.loads, help='JSON group_by expression (see AWS documentation)') parser.add_argument('--display',", "try: x = int(args.start) if x == 0: args.start = date.today().replace(day = 1)", "start try: x = int(args.start) if x == 0: args.start = date.today().replace(day =" ]
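A possible invocation, assuming the script is saved as fetch_costs.py and the aws_cost_explorer_converter module providing CostExplorerConverter is importable (the file name and the group-by/metrics values below are illustrative, not taken from the source):

    ./fetch_costs.py --start=-30 --granularity DAILY --display --out costs.csv \
        --group-by '[{"Type": "DIMENSION", "Key": "SERVICE"}]' \
        --metrics '["UnblendedCost", "NetUnblendedCost"]'

Here --start=-30 is resolved by parse_args() to the date 30 days before today, and the resulting frame is both printed and written to costs.csv.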
[ "synthetic_fields = [] def __init__(self, context=None, **kwargs): super(NeutronObject, self).__init__(context, **kwargs) self.obj_set_defaults() def to_dict(self):", "all persistent objects db_model = None primary_keys = ['id'] fields_no_update = [] #", "bases): if hasattr(base, 'primary_keys'): cls.fields_no_update += base.primary_keys # avoid duplicate entries cls.fields_no_update =", "['id'] fields_no_update = [] # dict with name mapping: {'field_name_in_object': 'field_name_in_db'} fields_need_translation =", "return result @classmethod def modify_fields_from_db(cls, db_obj): \"\"\" This method enables to modify the", "result.append(obj) return result @classmethod def is_accessible(cls, context, db_obj): return (context.is_admin or context.tenant_id ==", "Unless required by applicable law or agreed to in writing, software # distributed", "except obj_exc.DBDuplicateEntry as db_exc: raise NeutronDbObjectDuplicateEntry(object_class=self.__class__, db_exception=db_exc) self.from_db_object(db_obj) def _get_composite_keys(self): keys = {}", "@classmethod def modify_fields_to_db(cls, fields): \"\"\" This method enables to modify the fields and", "obj_base import six from neutron._i18n import _ from neutron.objects.db import api as obj_db_api", "def get_object(cls, context, **kwargs): raise NotImplementedError() @classmethod def validate_filters(cls, **kwargs): bad_filters = [key", "method fetches object from DB and convert it to versioned object. :param context:", "result: result[field] = result.pop(field_db) return result @classmethod def get_object(cls, context, **kwargs): \"\"\" This", "data is inserted into DB. It uses the fields_need_translation dict with structure: {", "Apache License, Version 2.0 (the \"License\"); you may # not use this file", "the License. You may obtain # a copy of the License at #", "def is_accessible(cls, context, db_obj): return (context.is_admin or context.tenant_id == db_obj.tenant_id) def _get_changed_persistent_fields(self): fields", "**kwargs) if db_obj: obj = cls(context, **cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes() return obj @classmethod def get_objects(cls,", "may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", ") def get_updatable_fields(cls, fields): fields = fields.copy() for field in cls.fields_no_update: if field", "context, **kwargs): raise NotImplementedError() def create(self): raise NotImplementedError() def update(self): raise NotImplementedError() def", ":param kwargs: multiple primary keys defined key=value pairs :return: single object of NeutronDbObject", "db_obj: obj = cls(context, **cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes() return obj @classmethod def get_objects(cls, context, **kwargs):", "obj @classmethod def get_object(cls, context, **kwargs): raise NotImplementedError() @classmethod def validate_filters(cls, **kwargs): bad_filters", "if db_obj: obj = cls(context, **cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes() return obj @classmethod def get_objects(cls, context,", "message = _(\"Failed to create a duplicate %(object_type)s: \" \"for attribute(s) %(attributes)s with", "context) obj.obj_reset_changes() return obj @classmethod def get_object(cls, context, **kwargs): raise NotImplementedError() @classmethod def", "fields: del fields[field] return fields @six.add_metaclass(abc.ABCMeta) class NeutronObject(obj_base.VersionedObject, obj_base.VersionedObjectDictCompat, obj_base.ComparableVersionedObject): synthetic_fields = []", "with the License. 
You may obtain # a copy of the License at", "self._get_changed_persistent_fields() updates = self._validate_changed_fields(updates) if updates: db_obj = obj_db_api.update_object(self._context, self.db_model, self.modify_fields_to_db(updates), **self._get_composite_keys()) self.from_db_object(self,", "fields and its content after data was fetched from DB. It uses the", "cls.fields_no_update: if field in fields: del fields[field] return fields @six.add_metaclass(abc.ABCMeta) class NeutronObject(obj_base.VersionedObject, obj_base.VersionedObjectDictCompat,", "of fields from NeutronDbObject :return: modified dict of fields \"\"\" result = copy.deepcopy(dict(fields))", "= cls(context, **cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes() return obj @classmethod def get_objects(cls, context, **kwargs): cls.validate_filters(**kwargs) db_objs", "{} for key in self.primary_keys: keys[key] = getattr(self, key) return self.modify_fields_to_db(keys) def update(self):", "None primary_keys = ['id'] fields_no_update = [] # dict with name mapping: {'field_name_in_object':", "from database :return: modified dict of DB values \"\"\" result = dict(db_obj) for", "in fields: del fields[field] return fields @six.add_metaclass(abc.ABCMeta) class NeutronObject(obj_base.VersionedObject, obj_base.VersionedObjectDictCompat, obj_base.ComparableVersionedObject): synthetic_fields =", "obj_exc.DBDuplicateEntry as db_exc: raise NeutronDbObjectDuplicateEntry(object_class=self.__class__, db_exception=db_exc) self.from_db_object(db_obj) def _get_composite_keys(self): keys = {} for", "'field_name_in_db'} fields_need_translation = {} def from_db_object(self, *objs): db_objs = [self.modify_fields_from_db(db_obj) for db_obj in", "obj.obj_reset_changes() return obj @classmethod def get_object(cls, context, **kwargs): raise NotImplementedError() @classmethod def validate_filters(cls,", "db_obj in objs] for field in self.fields: for db_obj in db_objs: if field", "cls(context, **cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes() return obj @classmethod def get_objects(cls, context, **kwargs): cls.validate_filters(**kwargs) db_objs =", "primary_keys = ['id'] fields_no_update = [] # dict with name mapping: {'field_name_in_object': 'field_name_in_db'}", "missing_keys: raise NeutronPrimaryKeyMissing(object_class=cls.__class__, missing_keys=missing_keys) db_obj = obj_db_api.get_object(context, cls.db_model, **kwargs) if db_obj: obj =", "use this file except in compliance with the License. 
You may obtain #", "db_objs = [self.modify_fields_from_db(db_obj) for db_obj in objs] for field in self.fields: for db_obj", "return fields def _validate_changed_fields(self, fields): fields = fields.copy() forbidden_updates = set(self.fields_no_update) & set(fields.keys())", "as obj_base import six from neutron._i18n import _ from neutron.objects.db import api as", "_(\"Failed to create a duplicate %(object_type)s: \" \"for attribute(s) %(attributes)s with value(s) %(values)s\")", "BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "six from neutron._i18n import _ from neutron.objects.db import api as obj_db_api class NeutronObjectUpdateForbidden(exceptions.NeutronException):", "is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF", "defined key=value pairs :return: single object of NeutronDbObject class \"\"\" missing_keys = set(cls.primary_keys).difference(kwargs.keys())", "with structure: { 'field_name_in_object': 'field_name_in_db' } :param db_obj: dict of object fetched from", "def get_objects(cls, context, **kwargs): cls.validate_filters(**kwargs) db_objs = obj_db_api.get_objects(context, cls.db_model, **kwargs) result = []", "def get_objects(cls, context, **kwargs): raise NotImplementedError() def create(self): raise NotImplementedError() def update(self): raise", "implied. See the # License for the specific language governing permissions and limitations", "neutron.objects.db import api as obj_db_api class NeutronObjectUpdateForbidden(exceptions.NeutronException): message = _(\"Unable to update the", "'field_name_in_db' } :param fields: dict of fields from NeutronDbObject :return: modified dict of", "in self.fields: for db_obj in db_objs: if field in db_obj: setattr(self, field, db_obj[field])", "exception as obj_exc from oslo_utils import reflection from oslo_versionedobjects import base as obj_base", "db_obj: setattr(self, field, db_obj[field]) break self.obj_reset_changes() @classmethod def modify_fields_to_db(cls, fields): \"\"\" This method", "= self._get_changed_persistent_fields() try: db_obj = obj_db_api.create_object(self._context, self.db_model, self.modify_fields_to_db(fields)) except obj_exc.DBDuplicateEntry as db_exc: raise", "get_object(cls, context, **kwargs): \"\"\" This method fetches object from DB and convert it", "fetches object from DB and convert it to versioned object. 
:param context: :param", "[] for db_obj in db_objs: obj = cls(context, **cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes() result.append(obj) return result", "NeutronObjectUpdateForbidden(fields=forbidden_updates) return fields def create(self): fields = self._get_changed_persistent_fields() try: db_obj = obj_db_api.create_object(self._context, self.db_model,", "return result @classmethod def get_object(cls, context, **kwargs): \"\"\" This method fetches object from", "you may # not use this file except in compliance with the License.", "@classmethod def clean_obj_from_primitive(cls, primitive, context=None): obj = cls.obj_from_primitive(primitive, context) obj.obj_reset_changes() return obj @classmethod", "method enables to modify the fields and its content after data was fetched", "%(attributes)s with value(s) %(values)s\") def __init__(self, object_class, db_exception): super(NeutronDbObjectDuplicateEntry, self).__init__( object_type=reflection.get_class_name(object_class, fully_qualified=False), attributes=db_exception.columns,", "for key in kwargs if key not in cls.fields or key in cls.synthetic_fields]", "modify the fields and its content before data is inserted into DB. It", "db_obj in db_objs: if field in db_obj: setattr(self, field, db_obj[field]) break self.obj_reset_changes() @classmethod", "@classmethod def get_object(cls, context, **kwargs): raise NotImplementedError() @classmethod def validate_filters(cls, **kwargs): bad_filters =", "@classmethod def modify_fields_from_db(cls, db_obj): \"\"\" This method enables to modify the fields and", "key in self.primary_keys: keys[key] = getattr(self, key) return self.modify_fields_to_db(keys) def update(self): updates =", "KIND, either express or implied. See the # License for the specific language", "if field in result: result[field_db] = result.pop(field) return result @classmethod def modify_fields_from_db(cls, db_obj):", "attributes=db_exception.columns, values=db_exception.value) class NeutronPrimaryKeyMissing(exceptions.BadRequest): message = _(\"For class %(object_type)s missing primary keys: \"", "for field in cls.fields_no_update: if field in fields: del fields[field] return fields @six.add_metaclass(abc.ABCMeta)", "objs] for field in self.fields: for db_obj in db_objs: if field in db_obj:", "file except in compliance with the License. You may obtain # a copy", "before data is inserted into DB. 
It uses the fields_need_translation dict with structure:", "\"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express", "import exception as obj_exc from oslo_utils import reflection from oslo_versionedobjects import base as", "self).__init__( object_type=reflection.get_class_name(object_class, fully_qualified=False), missing_keys=missing_keys ) def get_updatable_fields(cls, fields): fields = fields.copy() for field", "NeutronObject(obj_base.VersionedObject, obj_base.VersionedObjectDictCompat, obj_base.ComparableVersionedObject): synthetic_fields = [] def __init__(self, context=None, **kwargs): super(NeutronObject, self).__init__(context, **kwargs)", "updates = self._validate_changed_fields(updates) if updates: db_obj = obj_db_api.update_object(self._context, self.db_model, self.modify_fields_to_db(updates), **self._get_composite_keys()) self.from_db_object(self, db_obj)", "fully_qualified=False), attributes=db_exception.columns, values=db_exception.value) class NeutronPrimaryKeyMissing(exceptions.BadRequest): message = _(\"For class %(object_type)s missing primary keys:", "class NeutronDbObjectDuplicateEntry(exceptions.Conflict): message = _(\"Failed to create a duplicate %(object_type)s: \" \"for attribute(s)", "class NeutronObject(obj_base.VersionedObject, obj_base.VersionedObjectDictCompat, obj_base.ComparableVersionedObject): synthetic_fields = [] def __init__(self, context=None, **kwargs): super(NeutronObject, self).__init__(context,", "\" \"for attribute(s) %(attributes)s with value(s) %(values)s\") def __init__(self, object_class, db_exception): super(NeutronDbObjectDuplicateEntry, self).__init__(", "update(self): raise NotImplementedError() def delete(self): raise NotImplementedError() class DeclarativeObject(abc.ABCMeta): def __init__(cls, name, bases,", "object fields: %(fields)s\") class NeutronDbObjectDuplicateEntry(exceptions.Conflict): message = _(\"Failed to create a duplicate %(object_type)s:", "\"\"\" result = copy.deepcopy(dict(fields)) for field, field_db in cls.fields_need_translation.items(): if field in result:", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "if updates: db_obj = obj_db_api.update_object(self._context, self.db_model, self.modify_fields_to_db(updates), **self._get_composite_keys()) self.from_db_object(self, db_obj) def delete(self): obj_db_api.delete_object(self._context,", "in itertools.chain([cls], bases): if hasattr(base, 'primary_keys'): cls.fields_no_update += base.primary_keys # avoid duplicate entries", "'field_name_in_object': 'field_name_in_db' } :param fields: dict of fields from NeutronDbObject :return: modified dict", "result[field_db] = result.pop(field) return result @classmethod def modify_fields_from_db(cls, db_obj): \"\"\" This method enables", "def __init__(self, object_class, missing_keys): super(NeutronPrimaryKeyMissing, self).__init__( object_type=reflection.get_class_name(object_class, fully_qualified=False), missing_keys=missing_keys ) def get_updatable_fields(cls, fields):", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "import _ from neutron.objects.db import api as obj_db_api class NeutronObjectUpdateForbidden(exceptions.NeutronException): message = _(\"Unable", "NeutronDbObjectDuplicateEntry(exceptions.Conflict): message = _(\"Failed to create a duplicate %(object_type)s: \" \"for attribute(s) %(attributes)s", "hasattr(base, 'primary_keys'): cls.fields_no_update += base.primary_keys # avoid duplicate entries 
cls.fields_no_update = list(set(cls.fields_no_update)) @six.add_metaclass(DeclarativeObject)", "return (context.is_admin or context.tenant_id == db_obj.tenant_id) def _get_changed_persistent_fields(self): fields = self.obj_get_changes() for field", "keys: \" \"%(missing_keys)s\") def __init__(self, object_class, missing_keys): super(NeutronPrimaryKeyMissing, self).__init__( object_type=reflection.get_class_name(object_class, fully_qualified=False), missing_keys=missing_keys )", "distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY", "fully_qualified=False), missing_keys=missing_keys ) def get_updatable_fields(cls, fields): fields = fields.copy() for field in cls.fields_no_update:", "specific language governing permissions and limitations # under the License. import abc import", "is_accessible(cls, context, db_obj): return (context.is_admin or context.tenant_id == db_obj.tenant_id) def _get_changed_persistent_fields(self): fields =", "== db_obj.tenant_id) def _get_changed_persistent_fields(self): fields = self.obj_get_changes() for field in self.synthetic_fields: if field", "def delete(self): raise NotImplementedError() class DeclarativeObject(abc.ABCMeta): def __init__(cls, name, bases, dct): super(DeclarativeObject, cls).__init__(name,", "the # License for the specific language governing permissions and limitations # under", "raise NotImplementedError() def update(self): raise NotImplementedError() def delete(self): raise NotImplementedError() class DeclarativeObject(abc.ABCMeta): def", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "field, db_obj[field]) break self.obj_reset_changes() @classmethod def modify_fields_to_db(cls, fields): \"\"\" This method enables to", "get_objects(cls, context, **kwargs): cls.validate_filters(**kwargs) db_objs = obj_db_api.get_objects(context, cls.db_model, **kwargs) result = [] for", "You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "cls).__init__(name, bases, dct) for base in itertools.chain([cls], bases): if hasattr(base, 'primary_keys'): cls.fields_no_update +=", "db_objs: obj = cls(context, **cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes() result.append(obj) return result @classmethod def is_accessible(cls, context,", "(context.is_admin or context.tenant_id == db_obj.tenant_id) def _get_changed_persistent_fields(self): fields = self.obj_get_changes() for field in", "dict with name mapping: {'field_name_in_object': 'field_name_in_db'} fields_need_translation = {} def from_db_object(self, *objs): db_objs", "in db_obj: setattr(self, field, db_obj[field]) break self.obj_reset_changes() @classmethod def modify_fields_to_db(cls, fields): \"\"\" This", "method enables to modify the fields and its content before data is inserted", "required by applicable law or agreed to in writing, software # distributed under", "= self._validate_changed_fields(updates) if updates: db_obj = obj_db_api.update_object(self._context, self.db_model, self.modify_fields_to_db(updates), **self._get_composite_keys()) self.from_db_object(self, db_obj) def", "and its content before data is inserted into DB. 
It uses the fields_need_translation", "= obj_db_api.create_object(self._context, self.db_model, self.modify_fields_to_db(fields)) except obj_exc.DBDuplicateEntry as db_exc: raise NeutronDbObjectDuplicateEntry(object_class=self.__class__, db_exception=db_exc) self.from_db_object(db_obj) def", "fields: del fields[field] return fields def _validate_changed_fields(self, fields): fields = fields.copy() forbidden_updates =", "[self.modify_fields_from_db(db_obj) for db_obj in objs] for field in self.fields: for db_obj in db_objs:", "applicable law or agreed to in writing, software # distributed under the License", "This method enables to modify the fields and its content after data was", "fields.copy() for field in cls.fields_no_update: if field in fields: del fields[field] return fields", "**kwargs): raise NotImplementedError() @classmethod def validate_filters(cls, **kwargs): bad_filters = [key for key in", "or key in cls.synthetic_fields] if bad_filters: bad_filters = ', '.join(bad_filters) msg = _(\"'%s'", "result @classmethod def get_object(cls, context, **kwargs): \"\"\" This method fetches object from DB", ":return: modified dict of DB values \"\"\" result = dict(db_obj) for field, field_db", "in compliance with the License. You may obtain # a copy of the", "db_obj): return (context.is_admin or context.tenant_id == db_obj.tenant_id) def _get_changed_persistent_fields(self): fields = self.obj_get_changes() for", "or agreed to in writing, software # distributed under the License is distributed", "return result @classmethod def is_accessible(cls, context, db_obj): return (context.is_admin or context.tenant_id == db_obj.tenant_id)", "**kwargs): raise NotImplementedError() def create(self): raise NotImplementedError() def update(self): raise NotImplementedError() def delete(self):", "from neutron_lib import exceptions from oslo_db import exception as obj_exc from oslo_utils import", "= copy.deepcopy(dict(fields)) for field, field_db in cls.fields_need_translation.items(): if field in result: result[field_db] =", "obj @classmethod def get_objects(cls, context, **kwargs): cls.validate_filters(**kwargs) db_objs = obj_db_api.get_objects(context, cls.db_model, **kwargs) result", "= obj_db_api.get_object(context, cls.db_model, **kwargs) if db_obj: obj = cls(context, **cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes() return obj", "in cls.fields or key in cls.synthetic_fields] if bad_filters: bad_filters = ', '.join(bad_filters) msg", "= _(\"For class %(object_type)s missing primary keys: \" \"%(missing_keys)s\") def __init__(self, object_class, missing_keys):", "This method fetches object from DB and convert it to versioned object. :param", "db_obj): \"\"\" This method enables to modify the fields and its content after", "dct): super(DeclarativeObject, cls).__init__(name, bases, dct) for base in itertools.chain([cls], bases): if hasattr(base, 'primary_keys'):", "from DB and convert it to versioned object. :param context: :param kwargs: multiple", "the fields and its content before data is inserted into DB. 
It uses", "return self.modify_fields_to_db(keys) def update(self): updates = self._get_changed_persistent_fields() updates = self._validate_changed_fields(updates) if updates: db_obj", "\"\"\" This method enables to modify the fields and its content after data", "result[field] = result.pop(field_db) return result @classmethod def get_object(cls, context, **kwargs): \"\"\" This method", "import abc import copy import itertools from neutron_lib import exceptions from oslo_db import", "neutron_lib import exceptions from oslo_db import exception as obj_exc from oslo_utils import reflection", "import itertools from neutron_lib import exceptions from oslo_db import exception as obj_exc from", "= list(set(cls.fields_no_update)) @six.add_metaclass(DeclarativeObject) class NeutronDbObject(NeutronObject): # should be overridden for all persistent objects", "self.fields: for db_obj in db_objs: if field in db_obj: setattr(self, field, db_obj[field]) break", "fields def _validate_changed_fields(self, fields): fields = fields.copy() forbidden_updates = set(self.fields_no_update) & set(fields.keys()) if", "\" \"%(missing_keys)s\") def __init__(self, object_class, missing_keys): super(NeutronPrimaryKeyMissing, self).__init__( object_type=reflection.get_class_name(object_class, fully_qualified=False), missing_keys=missing_keys ) def", "in result: result[field] = result.pop(field_db) return result @classmethod def get_object(cls, context, **kwargs): \"\"\"", "return fields @six.add_metaclass(abc.ABCMeta) class NeutronObject(obj_base.VersionedObject, obj_base.VersionedObjectDictCompat, obj_base.ComparableVersionedObject): synthetic_fields = [] def __init__(self, context=None,", "License is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS", "writing, software # distributed under the License is distributed on an \"AS IS\"", "key in kwargs if key not in cls.fields or key in cls.synthetic_fields] if", "base as obj_base import six from neutron._i18n import _ from neutron.objects.db import api", "**kwargs): super(NeutronObject, self).__init__(context, **kwargs) self.obj_set_defaults() def to_dict(self): return dict(self.items()) @classmethod def clean_obj_from_primitive(cls, primitive,", "cls.fields_no_update += base.primary_keys # avoid duplicate entries cls.fields_no_update = list(set(cls.fields_no_update)) @six.add_metaclass(DeclarativeObject) class NeutronDbObject(NeutronObject):", "single object of NeutronDbObject class \"\"\" missing_keys = set(cls.primary_keys).difference(kwargs.keys()) if missing_keys: raise NeutronPrimaryKeyMissing(object_class=cls.__class__,", "self.obj_get_changes() for field in self.synthetic_fields: if field in fields: del fields[field] return fields", "exceptions from oslo_db import exception as obj_exc from oslo_utils import reflection from oslo_versionedobjects", "return dict(self.items()) @classmethod def clean_obj_from_primitive(cls, primitive, context=None): obj = cls.obj_from_primitive(primitive, context) obj.obj_reset_changes() return", "from DB. 
It uses the fields_need_translation dict with structure: { 'field_name_in_object': 'field_name_in_db' }", "fields.copy() forbidden_updates = set(self.fields_no_update) & set(fields.keys()) if forbidden_updates: raise NeutronObjectUpdateForbidden(fields=forbidden_updates) return fields def", "NotImplementedError() @classmethod def validate_filters(cls, **kwargs): bad_filters = [key for key in kwargs if", "if field in fields: del fields[field] return fields @six.add_metaclass(abc.ABCMeta) class NeutronObject(obj_base.VersionedObject, obj_base.VersionedObjectDictCompat, obj_base.ComparableVersionedObject):", "= self.obj_get_changes() for field in self.synthetic_fields: if field in fields: del fields[field] return", "structure: { 'field_name_in_object': 'field_name_in_db' } :param fields: dict of fields from NeutronDbObject :return:", "validate_filters(cls, **kwargs): bad_filters = [key for key in kwargs if key not in", "= [key for key in kwargs if key not in cls.fields or key", "def update(self): raise NotImplementedError() def delete(self): raise NotImplementedError() class DeclarativeObject(abc.ABCMeta): def __init__(cls, name,", "fields and its content before data is inserted into DB. It uses the", "{} def from_db_object(self, *objs): db_objs = [self.modify_fields_from_db(db_obj) for db_obj in objs] for field", ":return: single object of NeutronDbObject class \"\"\" missing_keys = set(cls.primary_keys).difference(kwargs.keys()) if missing_keys: raise", "as obj_exc from oslo_utils import reflection from oslo_versionedobjects import base as obj_base import", "missing_keys): super(NeutronPrimaryKeyMissing, self).__init__( object_type=reflection.get_class_name(object_class, fully_qualified=False), missing_keys=missing_keys ) def get_updatable_fields(cls, fields): fields = fields.copy()", "context=None): obj = cls.obj_from_primitive(primitive, context) obj.obj_reset_changes() return obj @classmethod def get_object(cls, context, **kwargs):", "NotImplementedError() def delete(self): raise NotImplementedError() class DeclarativeObject(abc.ABCMeta): def __init__(cls, name, bases, dct): super(DeclarativeObject,", "= self._get_changed_persistent_fields() updates = self._validate_changed_fields(updates) if updates: db_obj = obj_db_api.update_object(self._context, self.db_model, self.modify_fields_to_db(updates), **self._get_composite_keys())", "raise NotImplementedError() def create(self): raise NotImplementedError() def update(self): raise NotImplementedError() def delete(self): raise", "attribute(s) %(attributes)s with value(s) %(values)s\") def __init__(self, object_class, db_exception): super(NeutronDbObjectDuplicateEntry, self).__init__( object_type=reflection.get_class_name(object_class, fully_qualified=False),", "= [] # dict with name mapping: {'field_name_in_object': 'field_name_in_db'} fields_need_translation = {} def", "@classmethod def get_object(cls, context, **kwargs): \"\"\" This method fetches object from DB and", "fields_need_translation dict with structure: { 'field_name_in_object': 'field_name_in_db' } :param fields: dict of fields", "from neutron._i18n import _ from neutron.objects.db import api as obj_db_api class NeutronObjectUpdateForbidden(exceptions.NeutronException): message", "{'field_name_in_object': 'field_name_in_db'} fields_need_translation = {} def from_db_object(self, *objs): db_objs = [self.modify_fields_from_db(db_obj) for db_obj", "%(values)s\") def __init__(self, object_class, db_exception): super(NeutronDbObjectDuplicateEntry, self).__init__( 
object_type=reflection.get_class_name(object_class, fully_qualified=False), attributes=db_exception.columns, values=db_exception.value) class NeutronPrimaryKeyMissing(exceptions.BadRequest):", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "= _(\"'%s' is not supported for filtering\") % bad_filters raise exceptions.InvalidInput(error_message=msg) @classmethod @abc.abstractmethod", "db_obj = obj_db_api.get_object(context, cls.db_model, **kwargs) if db_obj: obj = cls(context, **cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes() return", "dict with structure: { 'field_name_in_object': 'field_name_in_db' } :param fields: dict of fields from", "**kwargs): bad_filters = [key for key in kwargs if key not in cls.fields", "context.tenant_id == db_obj.tenant_id) def _get_changed_persistent_fields(self): fields = self.obj_get_changes() for field in self.synthetic_fields: if", "DeclarativeObject(abc.ABCMeta): def __init__(cls, name, bases, dct): super(DeclarativeObject, cls).__init__(name, bases, dct) for base in", "if field in fields: del fields[field] return fields def _validate_changed_fields(self, fields): fields =", "fields \"\"\" result = copy.deepcopy(dict(fields)) for field, field_db in cls.fields_need_translation.items(): if field in", "Licensed under the Apache License, Version 2.0 (the \"License\"); you may # not", "__init__(cls, name, bases, dct): super(DeclarativeObject, cls).__init__(name, bases, dct) for base in itertools.chain([cls], bases):", "= obj_db_api.get_objects(context, cls.db_model, **kwargs) result = [] for db_obj in db_objs: obj =", "db_objs: if field in db_obj: setattr(self, field, db_obj[field]) break self.obj_reset_changes() @classmethod def modify_fields_to_db(cls,", "permissions and limitations # under the License. import abc import copy import itertools", "name mapping: {'field_name_in_object': 'field_name_in_db'} fields_need_translation = {} def from_db_object(self, *objs): db_objs = [self.modify_fields_from_db(db_obj)", "return fields def create(self): fields = self._get_changed_persistent_fields() try: db_obj = obj_db_api.create_object(self._context, self.db_model, self.modify_fields_to_db(fields))", "in self.primary_keys: keys[key] = getattr(self, key) return self.modify_fields_to_db(keys) def update(self): updates = self._get_changed_persistent_fields()", "2.0 (the \"License\"); you may # not use this file except in compliance", "_get_changed_persistent_fields(self): fields = self.obj_get_changes() for field in self.synthetic_fields: if field in fields: del", "if missing_keys: raise NeutronPrimaryKeyMissing(object_class=cls.__class__, missing_keys=missing_keys) db_obj = obj_db_api.get_object(context, cls.db_model, **kwargs) if db_obj: obj", "} :param fields: dict of fields from NeutronDbObject :return: modified dict of fields", "for field, field_db in cls.fields_need_translation.items(): if field_db in result: result[field] = result.pop(field_db) return", "return obj @classmethod def get_object(cls, context, **kwargs): raise NotImplementedError() @classmethod def validate_filters(cls, **kwargs):", "@classmethod def validate_filters(cls, **kwargs): bad_filters = [key for key in kwargs if key", "fields: dict of fields from NeutronDbObject :return: modified dict of fields \"\"\" result", "field in self.synthetic_fields: if field in fields: del fields[field] return fields def _validate_changed_fields(self,", "data was fetched from DB. 
It uses the fields_need_translation dict with structure: {", "= _(\"Unable to update the following object fields: %(fields)s\") class NeutronDbObjectDuplicateEntry(exceptions.Conflict): message =", "# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT", "cls.fields_need_translation.items(): if field_db in result: result[field] = result.pop(field_db) return result @classmethod def get_object(cls,", "= result.pop(field_db) return result @classmethod def get_object(cls, context, **kwargs): \"\"\" This method fetches", "cls.obj_from_primitive(primitive, context) obj.obj_reset_changes() return obj @classmethod def get_object(cls, context, **kwargs): raise NotImplementedError() @classmethod", "NotImplementedError() def create(self): raise NotImplementedError() def update(self): raise NotImplementedError() def delete(self): raise NotImplementedError()", "in cls.fields_need_translation.items(): if field_db in result: result[field] = result.pop(field_db) return result @classmethod def", "update the following object fields: %(fields)s\") class NeutronDbObjectDuplicateEntry(exceptions.Conflict): message = _(\"Failed to create", "db_exception=db_exc) self.from_db_object(db_obj) def _get_composite_keys(self): keys = {} for key in self.primary_keys: keys[key] =", "not in cls.fields or key in cls.synthetic_fields] if bad_filters: bad_filters = ', '.join(bad_filters)", "api as obj_db_api class NeutronObjectUpdateForbidden(exceptions.NeutronException): message = _(\"Unable to update the following object", "License, Version 2.0 (the \"License\"); you may # not use this file except", "\"\"\" This method enables to modify the fields and its content before data", "forbidden_updates: raise NeutronObjectUpdateForbidden(fields=forbidden_updates) return fields def create(self): fields = self._get_changed_persistent_fields() try: db_obj =", "NotImplementedError() class DeclarativeObject(abc.ABCMeta): def __init__(cls, name, bases, dct): super(DeclarativeObject, cls).__init__(name, bases, dct) for", "fields[field] return fields @six.add_metaclass(abc.ABCMeta) class NeutronObject(obj_base.VersionedObject, obj_base.VersionedObjectDictCompat, obj_base.ComparableVersionedObject): synthetic_fields = [] def __init__(self,", "persistent objects db_model = None primary_keys = ['id'] fields_no_update = [] # dict", "message = _(\"For class %(object_type)s missing primary keys: \" \"%(missing_keys)s\") def __init__(self, object_class,", "updates: db_obj = obj_db_api.update_object(self._context, self.db_model, self.modify_fields_to_db(updates), **self._get_composite_keys()) self.from_db_object(self, db_obj) def delete(self): obj_db_api.delete_object(self._context, self.db_model,", "for field in self.fields: for db_obj in db_objs: if field in db_obj: setattr(self,", "primary keys: \" \"%(missing_keys)s\") def __init__(self, object_class, missing_keys): super(NeutronPrimaryKeyMissing, self).__init__( object_type=reflection.get_class_name(object_class, fully_qualified=False), missing_keys=missing_keys", "dict of DB values \"\"\" result = dict(db_obj) for field, field_db in cls.fields_need_translation.items():", "fields): fields = fields.copy() for field in cls.fields_no_update: if field in fields: del", "@six.add_metaclass(abc.ABCMeta) class NeutronObject(obj_base.VersionedObject, obj_base.VersionedObjectDictCompat, obj_base.ComparableVersionedObject): synthetic_fields = [] def __init__(self, context=None, **kwargs): super(NeutronObject,", "and limitations # under the License. 
import abc import copy import itertools from", "for all persistent objects db_model = None primary_keys = ['id'] fields_no_update = []", "db_obj: dict of object fetched from database :return: modified dict of DB values", "database :return: modified dict of DB values \"\"\" result = dict(db_obj) for field,", "# dict with name mapping: {'field_name_in_object': 'field_name_in_db'} fields_need_translation = {} def from_db_object(self, *objs):", "from oslo_utils import reflection from oslo_versionedobjects import base as obj_base import six from", "%(fields)s\") class NeutronDbObjectDuplicateEntry(exceptions.Conflict): message = _(\"Failed to create a duplicate %(object_type)s: \" \"for", "field_db in cls.fields_need_translation.items(): if field in result: result[field_db] = result.pop(field) return result @classmethod", "structure: { 'field_name_in_object': 'field_name_in_db' } :param db_obj: dict of object fetched from database", "avoid duplicate entries cls.fields_no_update = list(set(cls.fields_no_update)) @six.add_metaclass(DeclarativeObject) class NeutronDbObject(NeutronObject): # should be overridden", "missing_keys=missing_keys ) def get_updatable_fields(cls, fields): fields = fields.copy() for field in cls.fields_no_update: if", "its content before data is inserted into DB. It uses the fields_need_translation dict", "cls.validate_filters(**kwargs) db_objs = obj_db_api.get_objects(context, cls.db_model, **kwargs) result = [] for db_obj in db_objs:", "agreed to in writing, software # distributed under the License is distributed on", "from oslo_db import exception as obj_exc from oslo_utils import reflection from oslo_versionedobjects import", "obj.obj_reset_changes() result.append(obj) return result @classmethod def is_accessible(cls, context, db_obj): return (context.is_admin or context.tenant_id", "= [] for db_obj in db_objs: obj = cls(context, **cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes() result.append(obj) return", "raise NotImplementedError() def delete(self): raise NotImplementedError() class DeclarativeObject(abc.ABCMeta): def __init__(cls, name, bases, dct):", "set(cls.primary_keys).difference(kwargs.keys()) if missing_keys: raise NeutronPrimaryKeyMissing(object_class=cls.__class__, missing_keys=missing_keys) db_obj = obj_db_api.get_object(context, cls.db_model, **kwargs) if db_obj:", "in cls.synthetic_fields] if bad_filters: bad_filters = ', '.join(bad_filters) msg = _(\"'%s' is not", "list(set(cls.fields_no_update)) @six.add_metaclass(DeclarativeObject) class NeutronDbObject(NeutronObject): # should be overridden for all persistent objects db_model", "keys[key] = getattr(self, key) return self.modify_fields_to_db(keys) def update(self): updates = self._get_changed_persistent_fields() updates =", "fetched from DB. It uses the fields_need_translation dict with structure: { 'field_name_in_object': 'field_name_in_db'", "was fetched from DB. 
It uses the fields_need_translation dict with structure: { 'field_name_in_object':", "db_obj = obj_db_api.update_object(self._context, self.db_model, self.modify_fields_to_db(updates), **self._get_composite_keys()) self.from_db_object(self, db_obj) def delete(self): obj_db_api.delete_object(self._context, self.db_model, **self._get_composite_keys())", "from neutron.objects.db import api as obj_db_api class NeutronObjectUpdateForbidden(exceptions.NeutronException): message = _(\"Unable to update", "# Unless required by applicable law or agreed to in writing, software #", "by applicable law or agreed to in writing, software # distributed under the", "following object fields: %(fields)s\") class NeutronDbObjectDuplicateEntry(exceptions.Conflict): message = _(\"Failed to create a duplicate", "fields = self._get_changed_persistent_fields() try: db_obj = obj_db_api.create_object(self._context, self.db_model, self.modify_fields_to_db(fields)) except obj_exc.DBDuplicateEntry as db_exc:", "if key not in cls.fields or key in cls.synthetic_fields] if bad_filters: bad_filters =", "for db_obj in db_objs: obj = cls(context, **cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes() result.append(obj) return result @classmethod", "under the License is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "raise exceptions.InvalidInput(error_message=msg) @classmethod @abc.abstractmethod def get_objects(cls, context, **kwargs): raise NotImplementedError() def create(self): raise", "__init__(self, context=None, **kwargs): super(NeutronObject, self).__init__(context, **kwargs) self.obj_set_defaults() def to_dict(self): return dict(self.items()) @classmethod def", "cls.db_model, **kwargs) if db_obj: obj = cls(context, **cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes() return obj @classmethod def", "into DB. 
It uses the fields_need_translation dict with structure: { 'field_name_in_object': 'field_name_in_db' }", "result = dict(db_obj) for field, field_db in cls.fields_need_translation.items(): if field_db in result: result[field]", "exceptions.InvalidInput(error_message=msg) @classmethod @abc.abstractmethod def get_objects(cls, context, **kwargs): raise NotImplementedError() def create(self): raise NotImplementedError()", "NeutronObjectUpdateForbidden(exceptions.NeutronException): message = _(\"Unable to update the following object fields: %(fields)s\") class NeutronDbObjectDuplicateEntry(exceptions.Conflict):", "as obj_db_api class NeutronObjectUpdateForbidden(exceptions.NeutronException): message = _(\"Unable to update the following object fields:", "msg = _(\"'%s' is not supported for filtering\") % bad_filters raise exceptions.InvalidInput(error_message=msg) @classmethod", "db_model = None primary_keys = ['id'] fields_no_update = [] # dict with name", "\"%(missing_keys)s\") def __init__(self, object_class, missing_keys): super(NeutronPrimaryKeyMissing, self).__init__( object_type=reflection.get_class_name(object_class, fully_qualified=False), missing_keys=missing_keys ) def get_updatable_fields(cls,", "fields def create(self): fields = self._get_changed_persistent_fields() try: db_obj = obj_db_api.create_object(self._context, self.db_model, self.modify_fields_to_db(fields)) except", "to modify the fields and its content after data was fetched from DB.", "missing_keys = set(cls.primary_keys).difference(kwargs.keys()) if missing_keys: raise NeutronPrimaryKeyMissing(object_class=cls.__class__, missing_keys=missing_keys) db_obj = obj_db_api.get_object(context, cls.db_model, **kwargs)", "inserted into DB. It uses the fields_need_translation dict with structure: { 'field_name_in_object': 'field_name_in_db'", "@classmethod @abc.abstractmethod def get_objects(cls, context, **kwargs): raise NotImplementedError() def create(self): raise NotImplementedError() def", "update(self): updates = self._get_changed_persistent_fields() updates = self._validate_changed_fields(updates) if updates: db_obj = obj_db_api.update_object(self._context, self.db_model,", "in db_objs: obj = cls(context, **cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes() result.append(obj) return result @classmethod def is_accessible(cls,", "except in compliance with the License. You may obtain # a copy of", "context, **kwargs): raise NotImplementedError() @classmethod def validate_filters(cls, **kwargs): bad_filters = [key for key", "context, **kwargs): cls.validate_filters(**kwargs) db_objs = obj_db_api.get_objects(context, cls.db_model, **kwargs) result = [] for db_obj", "def __init__(self, object_class, db_exception): super(NeutronDbObjectDuplicateEntry, self).__init__( object_type=reflection.get_class_name(object_class, fully_qualified=False), attributes=db_exception.columns, values=db_exception.value) class NeutronPrimaryKeyMissing(exceptions.BadRequest): message", "the License. 
import abc import copy import itertools from neutron_lib import exceptions from", "def _get_composite_keys(self): keys = {} for key in self.primary_keys: keys[key] = getattr(self, key)", "cls.fields_need_translation.items(): if field in result: result[field_db] = result.pop(field) return result @classmethod def modify_fields_from_db(cls,", "def modify_fields_from_db(cls, db_obj): \"\"\" This method enables to modify the fields and its", "if field_db in result: result[field] = result.pop(field_db) return result @classmethod def get_object(cls, context,", "in fields: del fields[field] return fields def _validate_changed_fields(self, fields): fields = fields.copy() forbidden_updates", "to in writing, software # distributed under the License is distributed on an", "duplicate %(object_type)s: \" \"for attribute(s) %(attributes)s with value(s) %(values)s\") def __init__(self, object_class, db_exception):", "field_db in result: result[field] = result.pop(field_db) return result @classmethod def get_object(cls, context, **kwargs):", "db_obj[field]) break self.obj_reset_changes() @classmethod def modify_fields_to_db(cls, fields): \"\"\" This method enables to modify", "raise NeutronDbObjectDuplicateEntry(object_class=self.__class__, db_exception=db_exc) self.from_db_object(db_obj) def _get_composite_keys(self): keys = {} for key in self.primary_keys:", "db_objs = obj_db_api.get_objects(context, cls.db_model, **kwargs) result = [] for db_obj in db_objs: obj", "%(object_type)s missing primary keys: \" \"%(missing_keys)s\") def __init__(self, object_class, missing_keys): super(NeutronPrimaryKeyMissing, self).__init__( object_type=reflection.get_class_name(object_class,", "oslo_utils import reflection from oslo_versionedobjects import base as obj_base import six from neutron._i18n", "distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT #", "= set(cls.primary_keys).difference(kwargs.keys()) if missing_keys: raise NeutronPrimaryKeyMissing(object_class=cls.__class__, missing_keys=missing_keys) db_obj = obj_db_api.get_object(context, cls.db_model, **kwargs) if", "NeutronDbObject :return: modified dict of fields \"\"\" result = copy.deepcopy(dict(fields)) for field, field_db", "keys defined key=value pairs :return: single object of NeutronDbObject class \"\"\" missing_keys =", "_ from neutron.objects.db import api as obj_db_api class NeutronObjectUpdateForbidden(exceptions.NeutronException): message = _(\"Unable to", "= ', '.join(bad_filters) msg = _(\"'%s' is not supported for filtering\") % bad_filters", "object_class, db_exception): super(NeutronDbObjectDuplicateEntry, self).__init__( object_type=reflection.get_class_name(object_class, fully_qualified=False), attributes=db_exception.columns, values=db_exception.value) class NeutronPrimaryKeyMissing(exceptions.BadRequest): message = _(\"For", "fields): fields = fields.copy() forbidden_updates = set(self.fields_no_update) & set(fields.keys()) if forbidden_updates: raise NeutronObjectUpdateForbidden(fields=forbidden_updates)", "class DeclarativeObject(abc.ABCMeta): def __init__(cls, name, bases, dct): super(DeclarativeObject, cls).__init__(name, bases, dct) for base", "# not use this file except in compliance with the License. 
You may", "with name mapping: {'field_name_in_object': 'field_name_in_db'} fields_need_translation = {} def from_db_object(self, *objs): db_objs =", "db_exc: raise NeutronDbObjectDuplicateEntry(object_class=self.__class__, db_exception=db_exc) self.from_db_object(db_obj) def _get_composite_keys(self): keys = {} for key in", "= _(\"Failed to create a duplicate %(object_type)s: \" \"for attribute(s) %(attributes)s with value(s)", "# License for the specific language governing permissions and limitations # under the", "@classmethod def is_accessible(cls, context, db_obj): return (context.is_admin or context.tenant_id == db_obj.tenant_id) def _get_changed_persistent_fields(self):", "obj_db_api.get_object(context, cls.db_model, **kwargs) if db_obj: obj = cls(context, **cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes() return obj @classmethod", "fields = fields.copy() for field in cls.fields_no_update: if field in fields: del fields[field]", "object. :param context: :param kwargs: multiple primary keys defined key=value pairs :return: single", "result.pop(field_db) return result @classmethod def get_object(cls, context, **kwargs): \"\"\" This method fetches object", "', '.join(bad_filters) msg = _(\"'%s' is not supported for filtering\") % bad_filters raise", "context, db_obj): return (context.is_admin or context.tenant_id == db_obj.tenant_id) def _get_changed_persistent_fields(self): fields = self.obj_get_changes()", "get_objects(cls, context, **kwargs): raise NotImplementedError() def create(self): raise NotImplementedError() def update(self): raise NotImplementedError()", "for base in itertools.chain([cls], bases): if hasattr(base, 'primary_keys'): cls.fields_no_update += base.primary_keys # avoid", "= ['id'] fields_no_update = [] # dict with name mapping: {'field_name_in_object': 'field_name_in_db'} fields_need_translation", "DB. It uses the fields_need_translation dict with structure: { 'field_name_in_object': 'field_name_in_db' } :param", "the fields and its content after data was fetched from DB. It uses", "dct) for base in itertools.chain([cls], bases): if hasattr(base, 'primary_keys'): cls.fields_no_update += base.primary_keys #", "[] def __init__(self, context=None, **kwargs): super(NeutronObject, self).__init__(context, **kwargs) self.obj_set_defaults() def to_dict(self): return dict(self.items())", "its content after data was fetched from DB. 
It uses the fields_need_translation dict", "fields: %(fields)s\") class NeutronDbObjectDuplicateEntry(exceptions.Conflict): message = _(\"Failed to create a duplicate %(object_type)s: \"", "import six from neutron._i18n import _ from neutron.objects.db import api as obj_db_api class", "of DB values \"\"\" result = dict(db_obj) for field, field_db in cls.fields_need_translation.items(): if", "obj_db_api.get_objects(context, cls.db_model, **kwargs) result = [] for db_obj in db_objs: obj = cls(context,", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "object_class, missing_keys): super(NeutronPrimaryKeyMissing, self).__init__( object_type=reflection.get_class_name(object_class, fully_qualified=False), missing_keys=missing_keys ) def get_updatable_fields(cls, fields): fields =", "def __init__(self, context=None, **kwargs): super(NeutronObject, self).__init__(context, **kwargs) self.obj_set_defaults() def to_dict(self): return dict(self.items()) @classmethod", "in writing, software # distributed under the License is distributed on an \"AS", "def _validate_changed_fields(self, fields): fields = fields.copy() forbidden_updates = set(self.fields_no_update) & set(fields.keys()) if forbidden_updates:", "Version 2.0 (the \"License\"); you may # not use this file except in", "fields[field] return fields def _validate_changed_fields(self, fields): fields = fields.copy() forbidden_updates = set(self.fields_no_update) &", "clean_obj_from_primitive(cls, primitive, context=None): obj = cls.obj_from_primitive(primitive, context) obj.obj_reset_changes() return obj @classmethod def get_object(cls,", "dict of fields \"\"\" result = copy.deepcopy(dict(fields)) for field, field_db in cls.fields_need_translation.items(): if", "super(DeclarativeObject, cls).__init__(name, bases, dct) for base in itertools.chain([cls], bases): if hasattr(base, 'primary_keys'): cls.fields_no_update", "del fields[field] return fields @six.add_metaclass(abc.ABCMeta) class NeutronObject(obj_base.VersionedObject, obj_base.VersionedObjectDictCompat, obj_base.ComparableVersionedObject): synthetic_fields = [] def", "raise NeutronPrimaryKeyMissing(object_class=cls.__class__, missing_keys=missing_keys) db_obj = obj_db_api.get_object(context, cls.db_model, **kwargs) if db_obj: obj = cls(context,", "\"License\"); you may # not use this file except in compliance with the", "= [self.modify_fields_from_db(db_obj) for db_obj in objs] for field in self.fields: for db_obj in", "get_object(cls, context, **kwargs): raise NotImplementedError() @classmethod def validate_filters(cls, **kwargs): bad_filters = [key for", "class NeutronDbObject(NeutronObject): # should be overridden for all persistent objects db_model = None", "'field_name_in_db' } :param db_obj: dict of object fetched from database :return: modified dict", "the Apache License, Version 2.0 (the \"License\"); you may # not use this", "def to_dict(self): return dict(self.items()) @classmethod def clean_obj_from_primitive(cls, primitive, context=None): obj = cls.obj_from_primitive(primitive, context)", "values \"\"\" result = dict(db_obj) for field, field_db in cls.fields_need_translation.items(): if field_db in", "obj_db_api.create_object(self._context, self.db_model, self.modify_fields_to_db(fields)) except obj_exc.DBDuplicateEntry as db_exc: raise NeutronDbObjectDuplicateEntry(object_class=self.__class__, db_exception=db_exc) self.from_db_object(db_obj) def _get_composite_keys(self):", "self.obj_set_defaults() def to_dict(self): 
return dict(self.items()) @classmethod def clean_obj_from_primitive(cls, primitive, context=None): obj = cls.obj_from_primitive(primitive,", "overridden for all persistent objects db_model = None primary_keys = ['id'] fields_no_update =", "**kwargs) self.obj_set_defaults() def to_dict(self): return dict(self.items()) @classmethod def clean_obj_from_primitive(cls, primitive, context=None): obj =", "for db_obj in objs] for field in self.fields: for db_obj in db_objs: if", "is not supported for filtering\") % bad_filters raise exceptions.InvalidInput(error_message=msg) @classmethod @abc.abstractmethod def get_objects(cls,", "cls(context, **cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes() result.append(obj) return result @classmethod def is_accessible(cls, context, db_obj): return (context.is_admin", "objects db_model = None primary_keys = ['id'] fields_no_update = [] # dict with", "the fields_need_translation dict with structure: { 'field_name_in_object': 'field_name_in_db' } :param db_obj: dict of", "self._validate_changed_fields(updates) if updates: db_obj = obj_db_api.update_object(self._context, self.db_model, self.modify_fields_to_db(updates), **self._get_composite_keys()) self.from_db_object(self, db_obj) def delete(self):", "create(self): raise NotImplementedError() def update(self): raise NotImplementedError() def delete(self): raise NotImplementedError() class DeclarativeObject(abc.ABCMeta):", "not use this file except in compliance with the License. You may obtain", "supported for filtering\") % bad_filters raise exceptions.InvalidInput(error_message=msg) @classmethod @abc.abstractmethod def get_objects(cls, context, **kwargs):", "def _get_changed_persistent_fields(self): fields = self.obj_get_changes() for field in self.synthetic_fields: if field in fields:", "name, bases, dct): super(DeclarativeObject, cls).__init__(name, bases, dct) for base in itertools.chain([cls], bases): if", "result = [] for db_obj in db_objs: obj = cls(context, **cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes() result.append(obj)", "of NeutronDbObject class \"\"\" missing_keys = set(cls.primary_keys).difference(kwargs.keys()) if missing_keys: raise NeutronPrimaryKeyMissing(object_class=cls.__class__, missing_keys=missing_keys) db_obj", "def from_db_object(self, *objs): db_objs = [self.modify_fields_from_db(db_obj) for db_obj in objs] for field in", "in self.synthetic_fields: if field in fields: del fields[field] return fields def _validate_changed_fields(self, fields):", "License for the specific language governing permissions and limitations # under the License.", "\"\"\" missing_keys = set(cls.primary_keys).difference(kwargs.keys()) if missing_keys: raise NeutronPrimaryKeyMissing(object_class=cls.__class__, missing_keys=missing_keys) db_obj = obj_db_api.get_object(context, cls.db_model,", "self.from_db_object(db_obj) def _get_composite_keys(self): keys = {} for key in self.primary_keys: keys[key] = getattr(self,", "_validate_changed_fields(self, fields): fields = fields.copy() forbidden_updates = set(self.fields_no_update) & set(fields.keys()) if forbidden_updates: raise", "a duplicate %(object_type)s: \" \"for attribute(s) %(attributes)s with value(s) %(values)s\") def __init__(self, object_class,", "governing permissions and limitations # under the License. 
import abc import copy import", "primitive, context=None): obj = cls.obj_from_primitive(primitive, context) obj.obj_reset_changes() return obj @classmethod def get_object(cls, context,", "= fields.copy() for field in cls.fields_no_update: if field in fields: del fields[field] return", "object fetched from database :return: modified dict of DB values \"\"\" result =", "self.primary_keys: keys[key] = getattr(self, key) return self.modify_fields_to_db(keys) def update(self): updates = self._get_changed_persistent_fields() updates", "NeutronDbObject class \"\"\" missing_keys = set(cls.primary_keys).difference(kwargs.keys()) if missing_keys: raise NeutronPrimaryKeyMissing(object_class=cls.__class__, missing_keys=missing_keys) db_obj =", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the #", "value(s) %(values)s\") def __init__(self, object_class, db_exception): super(NeutronDbObjectDuplicateEntry, self).__init__( object_type=reflection.get_class_name(object_class, fully_qualified=False), attributes=db_exception.columns, values=db_exception.value) class", "# avoid duplicate entries cls.fields_no_update = list(set(cls.fields_no_update)) @six.add_metaclass(DeclarativeObject) class NeutronDbObject(NeutronObject): # should be", "object of NeutronDbObject class \"\"\" missing_keys = set(cls.primary_keys).difference(kwargs.keys()) if missing_keys: raise NeutronPrimaryKeyMissing(object_class=cls.__class__, missing_keys=missing_keys)", "obj_base.ComparableVersionedObject): synthetic_fields = [] def __init__(self, context=None, **kwargs): super(NeutronObject, self).__init__(context, **kwargs) self.obj_set_defaults() def", "dict of object fetched from database :return: modified dict of DB values \"\"\"", "License. import abc import copy import itertools from neutron_lib import exceptions from oslo_db", "entries cls.fields_no_update = list(set(cls.fields_no_update)) @six.add_metaclass(DeclarativeObject) class NeutronDbObject(NeutronObject): # should be overridden for all", "OF ANY KIND, either express or implied. See the # License for the", "NeutronPrimaryKeyMissing(object_class=cls.__class__, missing_keys=missing_keys) db_obj = obj_db_api.get_object(context, cls.db_model, **kwargs) if db_obj: obj = cls(context, **cls.modify_fields_from_db(db_obj))", "field in result: result[field_db] = result.pop(field) return result @classmethod def modify_fields_from_db(cls, db_obj): \"\"\"", "'primary_keys'): cls.fields_no_update += base.primary_keys # avoid duplicate entries cls.fields_no_update = list(set(cls.fields_no_update)) @six.add_metaclass(DeclarativeObject) class", "[] # dict with name mapping: {'field_name_in_object': 'field_name_in_db'} fields_need_translation = {} def from_db_object(self,", "It uses the fields_need_translation dict with structure: { 'field_name_in_object': 'field_name_in_db' } :param fields:", "raise NotImplementedError() class DeclarativeObject(abc.ABCMeta): def __init__(cls, name, bases, dct): super(DeclarativeObject, cls).__init__(name, bases, dct)", "# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the", "values=db_exception.value) class NeutronPrimaryKeyMissing(exceptions.BadRequest): message = _(\"For class %(object_type)s missing primary keys: \" \"%(missing_keys)s\")", "dict with structure: { 'field_name_in_object': 'field_name_in_db' } :param db_obj: dict of object fetched", "super(NeutronPrimaryKeyMissing, self).__init__( object_type=reflection.get_class_name(object_class, fully_qualified=False), missing_keys=missing_keys ) def get_updatable_fields(cls, fields): fields = fields.copy() for", "{ 'field_name_in_object': 'field_name_in_db' } :param fields: dict of fields from NeutronDbObject :return: modified", "obj_exc from oslo_utils import reflection from oslo_versionedobjects import base as obj_base import six", "import api as obj_db_api class NeutronObjectUpdateForbidden(exceptions.NeutronException): message = _(\"Unable to update the following", "neutron._i18n import _ from neutron.objects.db import api as obj_db_api class NeutronObjectUpdateForbidden(exceptions.NeutronException): message =", "del fields[field] return fields def _validate_changed_fields(self, fields): fields = fields.copy() forbidden_updates = set(self.fields_no_update)", "field in self.fields: for db_obj in db_objs: if field in db_obj: setattr(self, field,", "(the \"License\"); you may # not use this file except in compliance with", "and its content after data was fetched from DB. It uses the fields_need_translation", "modify_fields_to_db(cls, fields): \"\"\" This method enables to modify the fields and its content", "super(NeutronDbObjectDuplicateEntry, self).__init__( object_type=reflection.get_class_name(object_class, fully_qualified=False), attributes=db_exception.columns, values=db_exception.value) class NeutronPrimaryKeyMissing(exceptions.BadRequest): message = _(\"For class %(object_type)s", "object_type=reflection.get_class_name(object_class, fully_qualified=False), missing_keys=missing_keys ) def get_updatable_fields(cls, fields): fields = fields.copy() for field in", "# # Unless required by applicable law or agreed to in writing, software", "= dict(db_obj) for field, field_db in cls.fields_need_translation.items(): if field_db in result: result[field] =", "db_exception): super(NeutronDbObjectDuplicateEntry, self).__init__( object_type=reflection.get_class_name(object_class, fully_qualified=False), attributes=db_exception.columns, values=db_exception.value) class NeutronPrimaryKeyMissing(exceptions.BadRequest): message = _(\"For class", "def clean_obj_from_primitive(cls, primitive, context=None): obj = cls.obj_from_primitive(primitive, context) obj.obj_reset_changes() return obj @classmethod def", "base in itertools.chain([cls], bases): if hasattr(base, 'primary_keys'): cls.fields_no_update += base.primary_keys # avoid duplicate", "fetched from database :return: modified dict of DB values \"\"\" result = dict(db_obj)", "or context.tenant_id == db_obj.tenant_id) def _get_changed_persistent_fields(self): fields = self.obj_get_changes() for field in self.synthetic_fields:", "dict of fields from NeutronDbObject :return: modified dict of fields \"\"\" result =", "obj.obj_reset_changes() return obj @classmethod def get_objects(cls, context, **kwargs): cls.validate_filters(**kwargs) db_objs = obj_db_api.get_objects(context, cls.db_model,", "field_db in cls.fields_need_translation.items(): if field_db in result: result[field] = result.pop(field_db) return result @classmethod", "+= base.primary_keys # avoid duplicate entries cls.fields_no_update = list(set(cls.fields_no_update)) 
@six.add_metaclass(DeclarativeObject) class NeutronDbObject(NeutronObject): #", "key not in cls.fields or key in cls.synthetic_fields] if bad_filters: bad_filters = ',", "\"for attribute(s) %(attributes)s with value(s) %(values)s\") def __init__(self, object_class, db_exception): super(NeutronDbObjectDuplicateEntry, self).__init__( object_type=reflection.get_class_name(object_class,", "self.db_model, self.modify_fields_to_db(fields)) except obj_exc.DBDuplicateEntry as db_exc: raise NeutronDbObjectDuplicateEntry(object_class=self.__class__, db_exception=db_exc) self.from_db_object(db_obj) def _get_composite_keys(self): keys", "self).__init__(context, **kwargs) self.obj_set_defaults() def to_dict(self): return dict(self.items()) @classmethod def clean_obj_from_primitive(cls, primitive, context=None): obj", ":param db_obj: dict of object fetched from database :return: modified dict of DB", "enables to modify the fields and its content after data was fetched from", "of object fetched from database :return: modified dict of DB values \"\"\" result", "obj = cls(context, **cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes() return obj @classmethod def get_objects(cls, context, **kwargs): cls.validate_filters(**kwargs)", "License. You may obtain # a copy of the License at # #", "= {} for key in self.primary_keys: keys[key] = getattr(self, key) return self.modify_fields_to_db(keys) def", "the License is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR", "field in fields: del fields[field] return fields def _validate_changed_fields(self, fields): fields = fields.copy()", "field in fields: del fields[field] return fields @six.add_metaclass(abc.ABCMeta) class NeutronObject(obj_base.VersionedObject, obj_base.VersionedObjectDictCompat, obj_base.ComparableVersionedObject): synthetic_fields", "_get_composite_keys(self): keys = {} for key in self.primary_keys: keys[key] = getattr(self, key) return", "def __init__(cls, name, bases, dct): super(DeclarativeObject, cls).__init__(name, bases, dct) for base in itertools.chain([cls],", ":param context: :param kwargs: multiple primary keys defined key=value pairs :return: single object", "base.primary_keys # avoid duplicate entries cls.fields_no_update = list(set(cls.fields_no_update)) @six.add_metaclass(DeclarativeObject) class NeutronDbObject(NeutronObject): # should", "to versioned object. :param context: :param kwargs: multiple primary keys defined key=value pairs", "It uses the fields_need_translation dict with structure: { 'field_name_in_object': 'field_name_in_db' } :param db_obj:", "context: :param kwargs: multiple primary keys defined key=value pairs :return: single object of", "self.synthetic_fields: if field in fields: del fields[field] return fields def _validate_changed_fields(self, fields): fields", "ANY KIND, either express or implied. See the # License for the specific", "= None primary_keys = ['id'] fields_no_update = [] # dict with name mapping:", "to update the following object fields: %(fields)s\") class NeutronDbObjectDuplicateEntry(exceptions.Conflict): message = _(\"Failed to", "obj_db_api class NeutronObjectUpdateForbidden(exceptions.NeutronException): message = _(\"Unable to update the following object fields: %(fields)s\")", "= cls.obj_from_primitive(primitive, context) obj.obj_reset_changes() return obj @classmethod def get_object(cls, context, **kwargs): raise NotImplementedError()", "after data was fetched from DB. 
It uses the fields_need_translation dict with structure:", "multiple primary keys defined key=value pairs :return: single object of NeutronDbObject class \"\"\"", "filtering\") % bad_filters raise exceptions.InvalidInput(error_message=msg) @classmethod @abc.abstractmethod def get_objects(cls, context, **kwargs): raise NotImplementedError()", "for key in self.primary_keys: keys[key] = getattr(self, key) return self.modify_fields_to_db(keys) def update(self): updates", "copy import itertools from neutron_lib import exceptions from oslo_db import exception as obj_exc", "= getattr(self, key) return self.modify_fields_to_db(keys) def update(self): updates = self._get_changed_persistent_fields() updates = self._validate_changed_fields(updates)", "fields): \"\"\" This method enables to modify the fields and its content before", "from NeutronDbObject :return: modified dict of fields \"\"\" result = copy.deepcopy(dict(fields)) for field,", "} :param db_obj: dict of object fetched from database :return: modified dict of", "# under the License. import abc import copy import itertools from neutron_lib import", "NeutronDbObjectDuplicateEntry(object_class=self.__class__, db_exception=db_exc) self.from_db_object(db_obj) def _get_composite_keys(self): keys = {} for key in self.primary_keys: keys[key]", "updates = self._get_changed_persistent_fields() updates = self._validate_changed_fields(updates) if updates: db_obj = obj_db_api.update_object(self._context, self.db_model, self.modify_fields_to_db(updates),", "copy.deepcopy(dict(fields)) for field, field_db in cls.fields_need_translation.items(): if field in result: result[field_db] = result.pop(field)", "obj = cls(context, **cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes() result.append(obj) return result @classmethod def is_accessible(cls, context, db_obj):", "fields_no_update = [] # dict with name mapping: {'field_name_in_object': 'field_name_in_db'} fields_need_translation = {}", "the fields_need_translation dict with structure: { 'field_name_in_object': 'field_name_in_db' } :param fields: dict of", "limitations # under the License. 
import abc import copy import itertools from neutron_lib", "break self.obj_reset_changes() @classmethod def modify_fields_to_db(cls, fields): \"\"\" This method enables to modify the", "import reflection from oslo_versionedobjects import base as obj_base import six from neutron._i18n import", "modified dict of fields \"\"\" result = copy.deepcopy(dict(fields)) for field, field_db in cls.fields_need_translation.items():", "self).__init__( object_type=reflection.get_class_name(object_class, fully_qualified=False), attributes=db_exception.columns, values=db_exception.value) class NeutronPrimaryKeyMissing(exceptions.BadRequest): message = _(\"For class %(object_type)s missing", "NotImplementedError() def update(self): raise NotImplementedError() def delete(self): raise NotImplementedError() class DeclarativeObject(abc.ABCMeta): def __init__(cls,", "{ 'field_name_in_object': 'field_name_in_db' } :param db_obj: dict of object fetched from database :return:", "DB values \"\"\" result = dict(db_obj) for field, field_db in cls.fields_need_translation.items(): if field_db", "= set(self.fields_no_update) & set(fields.keys()) if forbidden_updates: raise NeutronObjectUpdateForbidden(fields=forbidden_updates) return fields def create(self): fields", "with structure: { 'field_name_in_object': 'field_name_in_db' } :param fields: dict of fields from NeutronDbObject", "for field in self.synthetic_fields: if field in fields: del fields[field] return fields def", "This method enables to modify the fields and its content before data is", "@classmethod def get_objects(cls, context, **kwargs): cls.validate_filters(**kwargs) db_objs = obj_db_api.get_objects(context, cls.db_model, **kwargs) result =", "in kwargs if key not in cls.fields or key in cls.synthetic_fields] if bad_filters:", "bases, dct) for base in itertools.chain([cls], bases): if hasattr(base, 'primary_keys'): cls.fields_no_update += base.primary_keys", "**kwargs) result = [] for db_obj in db_objs: obj = cls(context, **cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes()", "missing primary keys: \" \"%(missing_keys)s\") def __init__(self, object_class, missing_keys): super(NeutronPrimaryKeyMissing, self).__init__( object_type=reflection.get_class_name(object_class, fully_qualified=False),", "not supported for filtering\") % bad_filters raise exceptions.InvalidInput(error_message=msg) @classmethod @abc.abstractmethod def get_objects(cls, context,", "under the Apache License, Version 2.0 (the \"License\"); you may # not use", "if hasattr(base, 'primary_keys'): cls.fields_no_update += base.primary_keys # avoid duplicate entries cls.fields_no_update = list(set(cls.fields_no_update))", "be overridden for all persistent objects db_model = None primary_keys = ['id'] fields_no_update", "WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See", "field in db_obj: setattr(self, field, db_obj[field]) break self.obj_reset_changes() @classmethod def modify_fields_to_db(cls, fields): \"\"\"", "import exceptions from oslo_db import exception as obj_exc from oslo_utils import reflection from", "import copy import itertools from neutron_lib import exceptions from oslo_db import exception as", "obj_base.VersionedObjectDictCompat, obj_base.ComparableVersionedObject): synthetic_fields = [] def __init__(self, context=None, **kwargs): super(NeutronObject, self).__init__(context, **kwargs) self.obj_set_defaults()", "= [] def __init__(self, context=None, **kwargs): super(NeutronObject, self).__init__(context, **kwargs) self.obj_set_defaults() def to_dict(self): return", "modify the fields and its content after data was fetched from DB. It", "bad_filters raise exceptions.InvalidInput(error_message=msg) @classmethod @abc.abstractmethod def get_objects(cls, context, **kwargs): raise NotImplementedError() def create(self):", "in cls.fields_no_update: if field in fields: del fields[field] return fields @six.add_metaclass(abc.ABCMeta) class NeutronObject(obj_base.VersionedObject,", "context=None, **kwargs): super(NeutronObject, self).__init__(context, **kwargs) self.obj_set_defaults() def to_dict(self): return dict(self.items()) @classmethod def clean_obj_from_primitive(cls,", "create(self): fields = self._get_changed_persistent_fields() try: db_obj = obj_db_api.create_object(self._context, self.db_model, self.modify_fields_to_db(fields)) except obj_exc.DBDuplicateEntry as", "__init__(self, object_class, db_exception): super(NeutronDbObjectDuplicateEntry, self).__init__( object_type=reflection.get_class_name(object_class, fully_qualified=False), attributes=db_exception.columns, values=db_exception.value) class NeutronPrimaryKeyMissing(exceptions.BadRequest): message =", "class \"\"\" missing_keys = set(cls.primary_keys).difference(kwargs.keys()) if missing_keys: raise NeutronPrimaryKeyMissing(object_class=cls.__class__, missing_keys=missing_keys) db_obj = obj_db_api.get_object(context,", "fields = self.obj_get_changes() for field in self.synthetic_fields: if field in fields: del fields[field]", "See the # License for the specific language governing permissions and limitations #", "as db_exc: raise NeutronDbObjectDuplicateEntry(object_class=self.__class__, db_exception=db_exc) self.from_db_object(db_obj) def _get_composite_keys(self): keys = {} for key", "object from DB and convert it to versioned object. :param context: :param kwargs:", "NeutronDbObject(NeutronObject): # should be overridden for all persistent objects db_model = None primary_keys", "db_obj = obj_db_api.create_object(self._context, self.db_model, self.modify_fields_to_db(fields)) except obj_exc.DBDuplicateEntry as db_exc: raise NeutronDbObjectDuplicateEntry(object_class=self.__class__, db_exception=db_exc) self.from_db_object(db_obj)", "oslo_db import exception as obj_exc from oslo_utils import reflection from oslo_versionedobjects import base", "field in cls.fields_no_update: if field in fields: del fields[field] return fields @six.add_metaclass(abc.ABCMeta) class", "under the License. 
import abc import copy import itertools from neutron_lib import exceptions", "in result: result[field_db] = result.pop(field) return result @classmethod def modify_fields_from_db(cls, db_obj): \"\"\" This", "law or agreed to in writing, software # distributed under the License is", "primary keys defined key=value pairs :return: single object of NeutronDbObject class \"\"\" missing_keys", "from oslo_versionedobjects import base as obj_base import six from neutron._i18n import _ from", "context, **kwargs): \"\"\" This method fetches object from DB and convert it to", "to modify the fields and its content before data is inserted into DB.", "express or implied. See the # License for the specific language governing permissions", "raise NeutronObjectUpdateForbidden(fields=forbidden_updates) return fields def create(self): fields = self._get_changed_persistent_fields() try: db_obj = obj_db_api.create_object(self._context,", "DB and convert it to versioned object. :param context: :param kwargs: multiple primary", "self.obj_reset_changes() @classmethod def modify_fields_to_db(cls, fields): \"\"\" This method enables to modify the fields", "an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either", "# a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "CONDITIONS OF ANY KIND, either express or implied. See the # License for", "getattr(self, key) return self.modify_fields_to_db(keys) def update(self): updates = self._get_changed_persistent_fields() updates = self._validate_changed_fields(updates) if", "if forbidden_updates: raise NeutronObjectUpdateForbidden(fields=forbidden_updates) return fields def create(self): fields = self._get_changed_persistent_fields() try: db_obj", "result = copy.deepcopy(dict(fields)) for field, field_db in cls.fields_need_translation.items(): if field in result: result[field_db]", "self.modify_fields_to_db(fields)) except obj_exc.DBDuplicateEntry as db_exc: raise NeutronDbObjectDuplicateEntry(object_class=self.__class__, db_exception=db_exc) self.from_db_object(db_obj) def _get_composite_keys(self): keys =", "the following object fields: %(fields)s\") class NeutronDbObjectDuplicateEntry(exceptions.Conflict): message = _(\"Failed to create a", "set(fields.keys()) if forbidden_updates: raise NeutronObjectUpdateForbidden(fields=forbidden_updates) return fields def create(self): fields = self._get_changed_persistent_fields() try:", "for the specific language governing permissions and limitations # under the License. 
import", "def create(self): raise NotImplementedError() def update(self): raise NotImplementedError() def delete(self): raise NotImplementedError() class", "object_type=reflection.get_class_name(object_class, fully_qualified=False), attributes=db_exception.columns, values=db_exception.value) class NeutronPrimaryKeyMissing(exceptions.BadRequest): message = _(\"For class %(object_type)s missing primary", "def validate_filters(cls, **kwargs): bad_filters = [key for key in kwargs if key not", "bad_filters = [key for key in kwargs if key not in cls.fields or", "enables to modify the fields and its content before data is inserted into", "fields_need_translation dict with structure: { 'field_name_in_object': 'field_name_in_db' } :param db_obj: dict of object", "**cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes() return obj @classmethod def get_objects(cls, context, **kwargs): cls.validate_filters(**kwargs) db_objs = obj_db_api.get_objects(context,", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "@six.add_metaclass(DeclarativeObject) class NeutronDbObject(NeutronObject): # should be overridden for all persistent objects db_model =", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "key=value pairs :return: single object of NeutronDbObject class \"\"\" missing_keys = set(cls.primary_keys).difference(kwargs.keys()) if", "missing_keys=missing_keys) db_obj = obj_db_api.get_object(context, cls.db_model, **kwargs) if db_obj: obj = cls(context, **cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes()", "compliance with the License. You may obtain # a copy of the License", "kwargs if key not in cls.fields or key in cls.synthetic_fields] if bad_filters: bad_filters", "key in cls.synthetic_fields] if bad_filters: bad_filters = ', '.join(bad_filters) msg = _(\"'%s' is", "keys = {} for key in self.primary_keys: keys[key] = getattr(self, key) return self.modify_fields_to_db(keys)", "abc import copy import itertools from neutron_lib import exceptions from oslo_db import exception", "def get_object(cls, context, **kwargs): \"\"\" This method fetches object from DB and convert", "language governing permissions and limitations # under the License. import abc import copy", "try: db_obj = obj_db_api.create_object(self._context, self.db_model, self.modify_fields_to_db(fields)) except obj_exc.DBDuplicateEntry as db_exc: raise NeutronDbObjectDuplicateEntry(object_class=self.__class__, db_exception=db_exc)", "bad_filters: bad_filters = ', '.join(bad_filters) msg = _(\"'%s' is not supported for filtering\")", "cls.fields_no_update = list(set(cls.fields_no_update)) @six.add_metaclass(DeclarativeObject) class NeutronDbObject(NeutronObject): # should be overridden for all persistent", "IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "oslo_versionedobjects import base as obj_base import six from neutron._i18n import _ from neutron.objects.db", "convert it to versioned object. 
:param context: :param kwargs: multiple primary keys defined", "if bad_filters: bad_filters = ', '.join(bad_filters) msg = _(\"'%s' is not supported for", "super(NeutronObject, self).__init__(context, **kwargs) self.obj_set_defaults() def to_dict(self): return dict(self.items()) @classmethod def clean_obj_from_primitive(cls, primitive, context=None):", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "db_obj in db_objs: obj = cls(context, **cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes() result.append(obj) return result @classmethod def", "modified dict of DB values \"\"\" result = dict(db_obj) for field, field_db in", "the specific language governing permissions and limitations # under the License. import abc", "db_obj.tenant_id) def _get_changed_persistent_fields(self): fields = self.obj_get_changes() for field in self.synthetic_fields: if field in", "delete(self): raise NotImplementedError() class DeclarativeObject(abc.ABCMeta): def __init__(cls, name, bases, dct): super(DeclarativeObject, cls).__init__(name, bases,", "= result.pop(field) return result @classmethod def modify_fields_from_db(cls, db_obj): \"\"\" This method enables to", "uses the fields_need_translation dict with structure: { 'field_name_in_object': 'field_name_in_db' } :param fields: dict", "should be overridden for all persistent objects db_model = None primary_keys = ['id']", "fields from NeutronDbObject :return: modified dict of fields \"\"\" result = copy.deepcopy(dict(fields)) for", "@abc.abstractmethod def get_objects(cls, context, **kwargs): raise NotImplementedError() def create(self): raise NotImplementedError() def update(self):", "import base as obj_base import six from neutron._i18n import _ from neutron.objects.db import", "may # not use this file except in compliance with the License. You", "# should be overridden for all persistent objects db_model = None primary_keys =", "class NeutronPrimaryKeyMissing(exceptions.BadRequest): message = _(\"For class %(object_type)s missing primary keys: \" \"%(missing_keys)s\") def", "pairs :return: single object of NeutronDbObject class \"\"\" missing_keys = set(cls.primary_keys).difference(kwargs.keys()) if missing_keys:", "set(self.fields_no_update) & set(fields.keys()) if forbidden_updates: raise NeutronObjectUpdateForbidden(fields=forbidden_updates) return fields def create(self): fields =", "in db_objs: if field in db_obj: setattr(self, field, db_obj[field]) break self.obj_reset_changes() @classmethod def", "& set(fields.keys()) if forbidden_updates: raise NeutronObjectUpdateForbidden(fields=forbidden_updates) return fields def create(self): fields = self._get_changed_persistent_fields()", "either express or implied. 
See the # License for the specific language governing", "_(\"For class %(object_type)s missing primary keys: \" \"%(missing_keys)s\") def __init__(self, object_class, missing_keys): super(NeutronPrimaryKeyMissing,", "= {} def from_db_object(self, *objs): db_objs = [self.modify_fields_from_db(db_obj) for db_obj in objs] for", "message = _(\"Unable to update the following object fields: %(fields)s\") class NeutronDbObjectDuplicateEntry(exceptions.Conflict): message", "in objs] for field in self.fields: for db_obj in db_objs: if field in", "kwargs: multiple primary keys defined key=value pairs :return: single object of NeutronDbObject class", "*objs): db_objs = [self.modify_fields_from_db(db_obj) for db_obj in objs] for field in self.fields: for", "if field in db_obj: setattr(self, field, db_obj[field]) break self.obj_reset_changes() @classmethod def modify_fields_to_db(cls, fields):", "this file except in compliance with the License. You may obtain # a", "bad_filters = ', '.join(bad_filters) msg = _(\"'%s' is not supported for filtering\") %", "= fields.copy() forbidden_updates = set(self.fields_no_update) & set(fields.keys()) if forbidden_updates: raise NeutronObjectUpdateForbidden(fields=forbidden_updates) return fields", "dict(db_obj) for field, field_db in cls.fields_need_translation.items(): if field_db in result: result[field] = result.pop(field_db)", "**kwargs): \"\"\" This method fetches object from DB and convert it to versioned", "**cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes() result.append(obj) return result @classmethod def is_accessible(cls, context, db_obj): return (context.is_admin or", "for db_obj in db_objs: if field in db_obj: setattr(self, field, db_obj[field]) break self.obj_reset_changes()", "or implied. See the # License for the specific language governing permissions and", "duplicate entries cls.fields_no_update = list(set(cls.fields_no_update)) @six.add_metaclass(DeclarativeObject) class NeutronDbObject(NeutronObject): # should be overridden for", "mapping: {'field_name_in_object': 'field_name_in_db'} fields_need_translation = {} def from_db_object(self, *objs): db_objs = [self.modify_fields_from_db(db_obj) for", "bases, dct): super(DeclarativeObject, cls).__init__(name, bases, dct) for base in itertools.chain([cls], bases): if hasattr(base,", "\"\"\" result = dict(db_obj) for field, field_db in cls.fields_need_translation.items(): if field_db in result:", "raise NotImplementedError() @classmethod def validate_filters(cls, **kwargs): bad_filters = [key for key in kwargs", "cls.fields or key in cls.synthetic_fields] if bad_filters: bad_filters = ', '.join(bad_filters) msg =", "in cls.fields_need_translation.items(): if field in result: result[field_db] = result.pop(field) return result @classmethod def", "uses the fields_need_translation dict with structure: { 'field_name_in_object': 'field_name_in_db' } :param db_obj: dict", "cls.db_model, **kwargs) result = [] for db_obj in db_objs: obj = cls(context, **cls.modify_fields_from_db(db_obj))", "with value(s) %(values)s\") def __init__(self, object_class, db_exception): super(NeutronDbObjectDuplicateEntry, self).__init__( object_type=reflection.get_class_name(object_class, fully_qualified=False), attributes=db_exception.columns, values=db_exception.value)", "itertools from neutron_lib import exceptions from oslo_db import exception as obj_exc from oslo_utils", "%(object_type)s: \" \"for attribute(s) %(attributes)s with value(s) %(values)s\") def __init__(self, object_class, db_exception): 
super(NeutronDbObjectDuplicateEntry,", "setattr(self, field, db_obj[field]) break self.obj_reset_changes() @classmethod def modify_fields_to_db(cls, fields): \"\"\" This method enables", "create a duplicate %(object_type)s: \" \"for attribute(s) %(attributes)s with value(s) %(values)s\") def __init__(self,", "on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND,", "def get_updatable_fields(cls, fields): fields = fields.copy() for field in cls.fields_no_update: if field in", "return obj @classmethod def get_objects(cls, context, **kwargs): cls.validate_filters(**kwargs) db_objs = obj_db_api.get_objects(context, cls.db_model, **kwargs)", "versioned object. :param context: :param kwargs: multiple primary keys defined key=value pairs :return:", "__init__(self, object_class, missing_keys): super(NeutronPrimaryKeyMissing, self).__init__( object_type=reflection.get_class_name(object_class, fully_qualified=False), missing_keys=missing_keys ) def get_updatable_fields(cls, fields): fields", "dict(self.items()) @classmethod def clean_obj_from_primitive(cls, primitive, context=None): obj = cls.obj_from_primitive(primitive, context) obj.obj_reset_changes() return obj", "fields = fields.copy() forbidden_updates = set(self.fields_no_update) & set(fields.keys()) if forbidden_updates: raise NeutronObjectUpdateForbidden(fields=forbidden_updates) return", "for filtering\") % bad_filters raise exceptions.InvalidInput(error_message=msg) @classmethod @abc.abstractmethod def get_objects(cls, context, **kwargs): raise", "'.join(bad_filters) msg = _(\"'%s' is not supported for filtering\") % bad_filters raise exceptions.InvalidInput(error_message=msg)", "field, field_db in cls.fields_need_translation.items(): if field in result: result[field_db] = result.pop(field) return result", "fields @six.add_metaclass(abc.ABCMeta) class NeutronObject(obj_base.VersionedObject, obj_base.VersionedObjectDictCompat, obj_base.ComparableVersionedObject): synthetic_fields = [] def __init__(self, context=None, **kwargs):", "field, field_db in cls.fields_need_translation.items(): if field_db in result: result[field] = result.pop(field_db) return result", "obj = cls.obj_from_primitive(primitive, context) obj.obj_reset_changes() return obj @classmethod def get_object(cls, context, **kwargs): raise", "[key for key in kwargs if key not in cls.fields or key in", "_(\"'%s' is not supported for filtering\") % bad_filters raise exceptions.InvalidInput(error_message=msg) @classmethod @abc.abstractmethod def", ":return: modified dict of fields \"\"\" result = copy.deepcopy(dict(fields)) for field, field_db in", "self.modify_fields_to_db(keys) def update(self): updates = self._get_changed_persistent_fields() updates = self._validate_changed_fields(updates) if updates: db_obj =", "result @classmethod def is_accessible(cls, context, db_obj): return (context.is_admin or context.tenant_id == db_obj.tenant_id) def", "% bad_filters raise exceptions.InvalidInput(error_message=msg) @classmethod @abc.abstractmethod def get_objects(cls, context, **kwargs): raise NotImplementedError() def", "result @classmethod def modify_fields_from_db(cls, db_obj): \"\"\" This method enables to modify the fields", "key) return self.modify_fields_to_db(keys) def update(self): updates = self._get_changed_persistent_fields() updates = self._validate_changed_fields(updates) if updates:", "from_db_object(self, *objs): db_objs = [self.modify_fields_from_db(db_obj) for db_obj in objs] for field in self.fields:", "forbidden_updates = set(self.fields_no_update) & 
set(fields.keys()) if forbidden_updates: raise NeutronObjectUpdateForbidden(fields=forbidden_updates) return fields def create(self):", "content before data is inserted into DB. It uses the fields_need_translation dict with", "= cls(context, **cls.modify_fields_from_db(db_obj)) obj.obj_reset_changes() result.append(obj) return result @classmethod def is_accessible(cls, context, db_obj): return", "OR CONDITIONS OF ANY KIND, either express or implied. See the # License", "content after data was fetched from DB. It uses the fields_need_translation dict with", "self._get_changed_persistent_fields() try: db_obj = obj_db_api.create_object(self._context, self.db_model, self.modify_fields_to_db(fields)) except obj_exc.DBDuplicateEntry as db_exc: raise NeutronDbObjectDuplicateEntry(object_class=self.__class__,", "obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "class NeutronObjectUpdateForbidden(exceptions.NeutronException): message = _(\"Unable to update the following object fields: %(fields)s\") class", ":param fields: dict of fields from NeutronDbObject :return: modified dict of fields \"\"\"", "reflection from oslo_versionedobjects import base as obj_base import six from neutron._i18n import _", "NeutronPrimaryKeyMissing(exceptions.BadRequest): message = _(\"For class %(object_type)s missing primary keys: \" \"%(missing_keys)s\") def __init__(self,", "# Licensed under the Apache License, Version 2.0 (the \"License\"); you may #", "def update(self): updates = self._get_changed_persistent_fields() updates = self._validate_changed_fields(updates) if updates: db_obj = obj_db_api.update_object(self._context,", "result: result[field_db] = result.pop(field) return result @classmethod def modify_fields_from_db(cls, db_obj): \"\"\" This method", "itertools.chain([cls], bases): if hasattr(base, 'primary_keys'): cls.fields_no_update += base.primary_keys # avoid duplicate entries cls.fields_no_update", "is inserted into DB. It uses the fields_need_translation dict with structure: { 'field_name_in_object':", "to create a duplicate %(object_type)s: \" \"for attribute(s) %(attributes)s with value(s) %(values)s\") def", "fields_need_translation = {} def from_db_object(self, *objs): db_objs = [self.modify_fields_from_db(db_obj) for db_obj in objs]", "and convert it to versioned object. 
:param context: :param kwargs: multiple primary keys", "get_updatable_fields(cls, fields): fields = fields.copy() for field in cls.fields_no_update: if field in fields:", "of fields \"\"\" result = copy.deepcopy(dict(fields)) for field, field_db in cls.fields_need_translation.items(): if field", "modify_fields_from_db(cls, db_obj): \"\"\" This method enables to modify the fields and its content", "**kwargs): cls.validate_filters(**kwargs) db_objs = obj_db_api.get_objects(context, cls.db_model, **kwargs) result = [] for db_obj in", "'field_name_in_object': 'field_name_in_db' } :param db_obj: dict of object fetched from database :return: modified", "cls.synthetic_fields] if bad_filters: bad_filters = ', '.join(bad_filters) msg = _(\"'%s' is not supported", "for field, field_db in cls.fields_need_translation.items(): if field in result: result[field_db] = result.pop(field) return", "result.pop(field) return result @classmethod def modify_fields_from_db(cls, db_obj): \"\"\" This method enables to modify", "def modify_fields_to_db(cls, fields): \"\"\" This method enables to modify the fields and its", "def create(self): fields = self._get_changed_persistent_fields() try: db_obj = obj_db_api.create_object(self._context, self.db_model, self.modify_fields_to_db(fields)) except obj_exc.DBDuplicateEntry", "_(\"Unable to update the following object fields: %(fields)s\") class NeutronDbObjectDuplicateEntry(exceptions.Conflict): message = _(\"Failed", "class %(object_type)s missing primary keys: \" \"%(missing_keys)s\") def __init__(self, object_class, missing_keys): super(NeutronPrimaryKeyMissing, self).__init__(", "it to versioned object. :param context: :param kwargs: multiple primary keys defined key=value", "\"\"\" This method fetches object from DB and convert it to versioned object.", "to_dict(self): return dict(self.items()) @classmethod def clean_obj_from_primitive(cls, primitive, context=None): obj = cls.obj_from_primitive(primitive, context) obj.obj_reset_changes()" ]
[ "from setuptools import setup, find_packages setup( name=\"AzureStorage\", version=\"0.0.2\", entry_points={ 'console_scripts': [ 'AzureCredentials =", "version=\"0.0.2\", entry_points={ 'console_scripts': [ 'AzureCredentials = azure_storage.azure_credentials:cli', 'AzureAutomate = azure_storage.azure_automate:cli', 'AzureDownload = azure_storage.azure_download:cli',", "azure_storage.azure_list:cli', 'AzureMove = azure_storage.azure_move:cli', 'AzureTier = azure_storage.azure_tier:cli', 'AzureSAS = azure_storage.azure_sas:cli' ], }, packages=find_packages(),", "setup, find_packages setup( name=\"AzureStorage\", version=\"0.0.2\", entry_points={ 'console_scripts': [ 'AzureCredentials = azure_storage.azure_credentials:cli', 'AzureAutomate =", "'AzureUpload = azure_storage.azure_upload:cli', 'AzureList = azure_storage.azure_list:cli', 'AzureMove = azure_storage.azure_move:cli', 'AzureTier = azure_storage.azure_tier:cli', 'AzureSAS", "name=\"AzureStorage\", version=\"0.0.2\", entry_points={ 'console_scripts': [ 'AzureCredentials = azure_storage.azure_credentials:cli', 'AzureAutomate = azure_storage.azure_automate:cli', 'AzureDownload =", "'AzureDownload = azure_storage.azure_download:cli', 'AzureDelete = azure_storage.azure_delete:cli', 'AzureUpload = azure_storage.azure_upload:cli', 'AzureList = azure_storage.azure_list:cli', 'AzureMove", "'AzureMove = azure_storage.azure_move:cli', 'AzureTier = azure_storage.azure_tier:cli', 'AzureSAS = azure_storage.azure_sas:cli' ], }, packages=find_packages(), include_package_data=True,", "[ 'AzureCredentials = azure_storage.azure_credentials:cli', 'AzureAutomate = azure_storage.azure_automate:cli', 'AzureDownload = azure_storage.azure_download:cli', 'AzureDelete = azure_storage.azure_delete:cli',", "= azure_storage.azure_delete:cli', 'AzureUpload = azure_storage.azure_upload:cli', 'AzureList = azure_storage.azure_list:cli', 'AzureMove = azure_storage.azure_move:cli', 'AzureTier =", "'console_scripts': [ 'AzureCredentials = azure_storage.azure_credentials:cli', 'AzureAutomate = azure_storage.azure_automate:cli', 'AzureDownload = azure_storage.azure_download:cli', 'AzureDelete =", "'AzureDelete = azure_storage.azure_delete:cli', 'AzureUpload = azure_storage.azure_upload:cli', 'AzureList = azure_storage.azure_list:cli', 'AzureMove = azure_storage.azure_move:cli', 'AzureTier", "= azure_storage.azure_tier:cli', 'AzureSAS = azure_storage.azure_sas:cli' ], }, packages=find_packages(), include_package_data=True, author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/OLC-LOC-Bioinformatics/AzureStorage\", )", "setuptools import setup, find_packages setup( name=\"AzureStorage\", version=\"0.0.2\", entry_points={ 'console_scripts': [ 'AzureCredentials = azure_storage.azure_credentials:cli',", "'AzureCredentials = azure_storage.azure_credentials:cli', 'AzureAutomate = azure_storage.azure_automate:cli', 'AzureDownload = azure_storage.azure_download:cli', 'AzureDelete = azure_storage.azure_delete:cli', 'AzureUpload", "find_packages setup( name=\"AzureStorage\", version=\"0.0.2\", entry_points={ 'console_scripts': [ 'AzureCredentials = azure_storage.azure_credentials:cli', 'AzureAutomate = azure_storage.azure_automate:cli',", "'AzureAutomate = azure_storage.azure_automate:cli', 'AzureDownload = azure_storage.azure_download:cli', 'AzureDelete = azure_storage.azure_delete:cli', 'AzureUpload = azure_storage.azure_upload:cli', 'AzureList", "= azure_storage.azure_move:cli', 'AzureTier = azure_storage.azure_tier:cli', 'AzureSAS = azure_storage.azure_sas:cli' 
], }, packages=find_packages(), include_package_data=True, author=\"<NAME>\",", "= azure_storage.azure_credentials:cli', 'AzureAutomate = azure_storage.azure_automate:cli', 'AzureDownload = azure_storage.azure_download:cli', 'AzureDelete = azure_storage.azure_delete:cli', 'AzureUpload =", "'AzureTier = azure_storage.azure_tier:cli', 'AzureSAS = azure_storage.azure_sas:cli' ], }, packages=find_packages(), include_package_data=True, author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/OLC-LOC-Bioinformatics/AzureStorage\",", "= azure_storage.azure_list:cli', 'AzureMove = azure_storage.azure_move:cli', 'AzureTier = azure_storage.azure_tier:cli', 'AzureSAS = azure_storage.azure_sas:cli' ], },", "import setup, find_packages setup( name=\"AzureStorage\", version=\"0.0.2\", entry_points={ 'console_scripts': [ 'AzureCredentials = azure_storage.azure_credentials:cli', 'AzureAutomate", "azure_storage.azure_upload:cli', 'AzureList = azure_storage.azure_list:cli', 'AzureMove = azure_storage.azure_move:cli', 'AzureTier = azure_storage.azure_tier:cli', 'AzureSAS = azure_storage.azure_sas:cli'", "azure_storage.azure_automate:cli', 'AzureDownload = azure_storage.azure_download:cli', 'AzureDelete = azure_storage.azure_delete:cli', 'AzureUpload = azure_storage.azure_upload:cli', 'AzureList = azure_storage.azure_list:cli',", "azure_storage.azure_download:cli', 'AzureDelete = azure_storage.azure_delete:cli', 'AzureUpload = azure_storage.azure_upload:cli', 'AzureList = azure_storage.azure_list:cli', 'AzureMove = azure_storage.azure_move:cli',", "= azure_storage.azure_download:cli', 'AzureDelete = azure_storage.azure_delete:cli', 'AzureUpload = azure_storage.azure_upload:cli', 'AzureList = azure_storage.azure_list:cli', 'AzureMove =", "= azure_storage.azure_upload:cli', 'AzureList = azure_storage.azure_list:cli', 'AzureMove = azure_storage.azure_move:cli', 'AzureTier = azure_storage.azure_tier:cli', 'AzureSAS =", "setup( name=\"AzureStorage\", version=\"0.0.2\", entry_points={ 'console_scripts': [ 'AzureCredentials = azure_storage.azure_credentials:cli', 'AzureAutomate = azure_storage.azure_automate:cli', 'AzureDownload", "= azure_storage.azure_automate:cli', 'AzureDownload = azure_storage.azure_download:cli', 'AzureDelete = azure_storage.azure_delete:cli', 'AzureUpload = azure_storage.azure_upload:cli', 'AzureList =", "'AzureList = azure_storage.azure_list:cli', 'AzureMove = azure_storage.azure_move:cli', 'AzureTier = azure_storage.azure_tier:cli', 'AzureSAS = azure_storage.azure_sas:cli' ],", "entry_points={ 'console_scripts': [ 'AzureCredentials = azure_storage.azure_credentials:cli', 'AzureAutomate = azure_storage.azure_automate:cli', 'AzureDownload = azure_storage.azure_download:cli', 'AzureDelete", "azure_storage.azure_credentials:cli', 'AzureAutomate = azure_storage.azure_automate:cli', 'AzureDownload = azure_storage.azure_download:cli', 'AzureDelete = azure_storage.azure_delete:cli', 'AzureUpload = azure_storage.azure_upload:cli',", "azure_storage.azure_delete:cli', 'AzureUpload = azure_storage.azure_upload:cli', 'AzureList = azure_storage.azure_list:cli', 'AzureMove = azure_storage.azure_move:cli', 'AzureTier = azure_storage.azure_tier:cli',", "#!/usr/bin/env python from setuptools import setup, find_packages setup( name=\"AzureStorage\", version=\"0.0.2\", entry_points={ 'console_scripts': [", "python from setuptools import setup, find_packages setup( name=\"AzureStorage\", version=\"0.0.2\", entry_points={ 'console_scripts': [ 'AzureCredentials", "azure_storage.azure_move:cli', 
'AzureTier = azure_storage.azure_tier:cli', 'AzureSAS = azure_storage.azure_sas:cli' ], }, packages=find_packages(), include_package_data=True, author=\"<NAME>\", author_email=\"<EMAIL>\"," ]
[ "from base.views.order_views import * urlpatterns = [ path('', getOrders, name='orders'), path('add/', addOrderItems, name='orders-add'),", "path('', getOrders, name='orders'), path('add/', addOrderItems, name='orders-add'), path('gettoken/', getTokenView, name='get-client-token'), path('myorders/', getMyOrders, name='myorders'), path('<str:pk>/',", "name='get-client-token'), path('myorders/', getMyOrders, name='myorders'), path('<str:pk>/', getOrderById, name='get-order-by-id'), path('<str:pk>/deliver/', updateOrderToDelivered, name='order-delivered'), path('<str:pk>/pay/', updateOrderToPaid, name='pay'),", "name='orders'), path('add/', addOrderItems, name='orders-add'), path('gettoken/', getTokenView, name='get-client-token'), path('myorders/', getMyOrders, name='myorders'), path('<str:pk>/', getOrderById, name='get-order-by-id'),", "* urlpatterns = [ path('', getOrders, name='orders'), path('add/', addOrderItems, name='orders-add'), path('gettoken/', getTokenView, name='get-client-token'),", "= [ path('', getOrders, name='orders'), path('add/', addOrderItems, name='orders-add'), path('gettoken/', getTokenView, name='get-client-token'), path('myorders/', getMyOrders,", "import * urlpatterns = [ path('', getOrders, name='orders'), path('add/', addOrderItems, name='orders-add'), path('gettoken/', getTokenView,", "from django.urls import path from base.views.order_views import * urlpatterns = [ path('', getOrders,", "django.urls import path from base.views.order_views import * urlpatterns = [ path('', getOrders, name='orders'),", "import path from base.views.order_views import * urlpatterns = [ path('', getOrders, name='orders'), path('add/',", "getTokenView, name='get-client-token'), path('myorders/', getMyOrders, name='myorders'), path('<str:pk>/', getOrderById, name='get-order-by-id'), path('<str:pk>/deliver/', updateOrderToDelivered, name='order-delivered'), path('<str:pk>/pay/', updateOrderToPaid,", "urlpatterns = [ path('', getOrders, name='orders'), path('add/', addOrderItems, name='orders-add'), path('gettoken/', getTokenView, name='get-client-token'), path('myorders/',", "path('add/', addOrderItems, name='orders-add'), path('gettoken/', getTokenView, name='get-client-token'), path('myorders/', getMyOrders, name='myorders'), path('<str:pk>/', getOrderById, name='get-order-by-id'), path('<str:pk>/deliver/',", "base.views.order_views import * urlpatterns = [ path('', getOrders, name='orders'), path('add/', addOrderItems, name='orders-add'), path('gettoken/',", "addOrderItems, name='orders-add'), path('gettoken/', getTokenView, name='get-client-token'), path('myorders/', getMyOrders, name='myorders'), path('<str:pk>/', getOrderById, name='get-order-by-id'), path('<str:pk>/deliver/', updateOrderToDelivered,", "path('myorders/', getMyOrders, name='myorders'), path('<str:pk>/', getOrderById, name='get-order-by-id'), path('<str:pk>/deliver/', updateOrderToDelivered, name='order-delivered'), path('<str:pk>/pay/', updateOrderToPaid, name='pay'), ]", "[ path('', getOrders, name='orders'), path('add/', addOrderItems, name='orders-add'), path('gettoken/', getTokenView, name='get-client-token'), path('myorders/', getMyOrders, name='myorders'),", "name='orders-add'), path('gettoken/', getTokenView, name='get-client-token'), path('myorders/', getMyOrders, name='myorders'), path('<str:pk>/', getOrderById, name='get-order-by-id'), path('<str:pk>/deliver/', updateOrderToDelivered, name='order-delivered'),", "path('gettoken/', getTokenView, name='get-client-token'), path('myorders/', getMyOrders, 
name='myorders'), path('<str:pk>/', getOrderById, name='get-order-by-id'), path('<str:pk>/deliver/', updateOrderToDelivered, name='order-delivered'), path('<str:pk>/pay/',", "getOrders, name='orders'), path('add/', addOrderItems, name='orders-add'), path('gettoken/', getTokenView, name='get-client-token'), path('myorders/', getMyOrders, name='myorders'), path('<str:pk>/', getOrderById,", "path from base.views.order_views import * urlpatterns = [ path('', getOrders, name='orders'), path('add/', addOrderItems," ]
[ "assert wf.name == \"test\" assert wf.kind == \"Workflow\" assert len(wf.spec.templates) == 1 def", "\"\"\"Test Workflow.\"\"\" _WORKFLOW_FILE = TestCase.DATA / \"workflows\" / \"hello-world.yaml\" def test_from_file(self) -> None:", "None: \"\"\"Test `Workflow.from_file` methods.\"\"\" wf = Workflow.from_file(self._WORKFLOW_FILE) assert isinstance(wf, Workflow) assert wf.name ==", "str) -> None: \"\"\"Test `Workflow.from_url` methods.\"\"\" fake_response = type( \"Response\", (), {\"text\": self._WORKFLOW_FILE.read_text(),", "requests from argo.workflows.dsl import Workflow from ._base import TestCase \"\"\"Workflow test suite.\"\"\" @pytest.fixture", "methods.\"\"\" fake_response = type( \"Response\", (), {\"text\": self._WORKFLOW_FILE.read_text(), \"raise_for_status\": lambda: None}, ) flexmock(requests).should_receive(\"get\").and_return(fake_response)", "flexmock(requests).should_receive(\"get\").and_return(fake_response) wf = Workflow.from_url(url) assert isinstance(wf, Workflow) assert wf.name == \"test\" assert wf.kind", "None}, ) flexmock(requests).should_receive(\"get\").and_return(fake_response) wf = Workflow.from_url(url) assert isinstance(wf, Workflow) assert wf.name == \"test\"", ") flexmock(requests).should_receive(\"get\").and_return(fake_response) wf = Workflow.from_url(url) assert isinstance(wf, Workflow) assert wf.name == \"test\" assert", "Workflow.\"\"\" _WORKFLOW_FILE = TestCase.DATA / \"workflows\" / \"hello-world.yaml\" def test_from_file(self) -> None: \"\"\"Test", "\"\"\"Test `Workflow.from_url` methods.\"\"\" fake_response = type( \"Response\", (), {\"text\": self._WORKFLOW_FILE.read_text(), \"raise_for_status\": lambda: None},", "None: \"\"\"Test `Workflow.from_url` methods.\"\"\" fake_response = type( \"Response\", (), {\"text\": self._WORKFLOW_FILE.read_text(), \"raise_for_status\": lambda:", "Workflow.from_url(url) assert isinstance(wf, Workflow) assert wf.name == \"test\" assert wf.kind == \"Workflow\" assert", "isinstance(wf, Workflow) assert wf.name == \"test\" assert wf.kind == \"Workflow\" assert len(wf.spec.templates) ==", "\"raise_for_status\": lambda: None}, ) flexmock(requests).should_receive(\"get\").and_return(fake_response) wf = Workflow.from_url(url) assert isinstance(wf, Workflow) assert wf.name", "test_from_url(self, url: str) -> None: \"\"\"Test `Workflow.from_url` methods.\"\"\" fake_response = type( \"Response\", (),", "fixture.\"\"\" class TestWorkflow(TestCase): \"\"\"Test Workflow.\"\"\" _WORKFLOW_FILE = TestCase.DATA / \"workflows\" / \"hello-world.yaml\" def", "/ \"workflows\" / \"hello-world.yaml\" def test_from_file(self) -> None: \"\"\"Test `Workflow.from_file` methods.\"\"\" wf =", "wf = Workflow.from_url(url) assert isinstance(wf, Workflow) assert wf.name == \"test\" assert wf.kind ==", "== \"test\" assert wf.kind == \"Workflow\" assert len(wf.spec.templates) == 1 def test_from_url(self, url:", "wf.kind == \"Workflow\" assert len(wf.spec.templates) == 1 def test_from_url(self, url: str) -> None:", "import requests from argo.workflows.dsl import Workflow from ._base import TestCase \"\"\"Workflow test suite.\"\"\"", "def test_from_file(self) -> None: \"\"\"Test `Workflow.from_file` methods.\"\"\" wf = Workflow.from_file(self._WORKFLOW_FILE) assert isinstance(wf, Workflow)", "flexmock import pytest import requests from argo.workflows.dsl import Workflow from ._base import TestCase", "(), {\"text\": self._WORKFLOW_FILE.read_text(), \"raise_for_status\": lambda: None}, ) 
flexmock(requests).should_receive(\"get\").and_return(fake_response) wf = Workflow.from_url(url) assert isinstance(wf,", "= TestCase.DATA / \"workflows\" / \"hello-world.yaml\" def test_from_file(self) -> None: \"\"\"Test `Workflow.from_file` methods.\"\"\"", "wf = Workflow.from_file(self._WORKFLOW_FILE) assert isinstance(wf, Workflow) assert wf.name == \"test\" assert wf.kind ==", "/ \"hello-world.yaml\" def test_from_file(self) -> None: \"\"\"Test `Workflow.from_file` methods.\"\"\" wf = Workflow.from_file(self._WORKFLOW_FILE) assert", "class TestWorkflow(TestCase): \"\"\"Test Workflow.\"\"\" _WORKFLOW_FILE = TestCase.DATA / \"workflows\" / \"hello-world.yaml\" def test_from_file(self)", "== \"Workflow\" assert len(wf.spec.templates) == 1 def test_from_url(self, url: str) -> None: \"\"\"Test", "= Workflow.from_file(self._WORKFLOW_FILE) assert isinstance(wf, Workflow) assert wf.name == \"test\" assert wf.kind == \"Workflow\"", "import Workflow from ._base import TestCase \"\"\"Workflow test suite.\"\"\" @pytest.fixture # type: ignore", "def url() -> str: \"\"\"Fake URL fixture.\"\"\" class TestWorkflow(TestCase): \"\"\"Test Workflow.\"\"\" _WORKFLOW_FILE =", "str: \"\"\"Fake URL fixture.\"\"\" class TestWorkflow(TestCase): \"\"\"Test Workflow.\"\"\" _WORKFLOW_FILE = TestCase.DATA / \"workflows\"", "1 def test_from_url(self, url: str) -> None: \"\"\"Test `Workflow.from_url` methods.\"\"\" fake_response = type(", "Workflow) assert wf.name == \"test\" assert wf.kind == \"Workflow\" assert len(wf.spec.templates) == 1", "ignore def url() -> str: \"\"\"Fake URL fixture.\"\"\" class TestWorkflow(TestCase): \"\"\"Test Workflow.\"\"\" _WORKFLOW_FILE", "type: ignore def url() -> str: \"\"\"Fake URL fixture.\"\"\" class TestWorkflow(TestCase): \"\"\"Test Workflow.\"\"\"", "assert wf.kind == \"Workflow\" assert len(wf.spec.templates) == 1 def test_from_url(self, url: str) ->", "url: str) -> None: \"\"\"Test `Workflow.from_url` methods.\"\"\" fake_response = type( \"Response\", (), {\"text\":", "url() -> str: \"\"\"Fake URL fixture.\"\"\" class TestWorkflow(TestCase): \"\"\"Test Workflow.\"\"\" _WORKFLOW_FILE = TestCase.DATA", "from ._base import TestCase \"\"\"Workflow test suite.\"\"\" @pytest.fixture # type: ignore def url()", "argo.workflows.dsl import Workflow from ._base import TestCase \"\"\"Workflow test suite.\"\"\" @pytest.fixture # type:", "-> str: \"\"\"Fake URL fixture.\"\"\" class TestWorkflow(TestCase): \"\"\"Test Workflow.\"\"\" _WORKFLOW_FILE = TestCase.DATA /", "# type: ignore def url() -> str: \"\"\"Fake URL fixture.\"\"\" class TestWorkflow(TestCase): \"\"\"Test", "_WORKFLOW_FILE = TestCase.DATA / \"workflows\" / \"hello-world.yaml\" def test_from_file(self) -> None: \"\"\"Test `Workflow.from_file`", "lambda: None}, ) flexmock(requests).should_receive(\"get\").and_return(fake_response) wf = Workflow.from_url(url) assert isinstance(wf, Workflow) assert wf.name ==", "\"hello-world.yaml\" def test_from_file(self) -> None: \"\"\"Test `Workflow.from_file` methods.\"\"\" wf = Workflow.from_file(self._WORKFLOW_FILE) assert isinstance(wf,", "`Workflow.from_url` methods.\"\"\" fake_response = type( \"Response\", (), {\"text\": self._WORKFLOW_FILE.read_text(), \"raise_for_status\": lambda: None}, )", "TestWorkflow(TestCase): \"\"\"Test Workflow.\"\"\" _WORKFLOW_FILE = TestCase.DATA / \"workflows\" / \"hello-world.yaml\" def test_from_file(self) ->", "= Workflow.from_url(url) assert isinstance(wf, Workflow) assert wf.name == \"test\" assert wf.kind == \"Workflow\"", "len(wf.spec.templates) == 
1 def test_from_url(self, url: str) -> None: \"\"\"Test `Workflow.from_url` methods.\"\"\" fake_response", "Workflow from ._base import TestCase \"\"\"Workflow test suite.\"\"\" @pytest.fixture # type: ignore def", "`Workflow.from_file` methods.\"\"\" wf = Workflow.from_file(self._WORKFLOW_FILE) assert isinstance(wf, Workflow) assert wf.name == \"test\" assert", "-> None: \"\"\"Test `Workflow.from_url` methods.\"\"\" fake_response = type( \"Response\", (), {\"text\": self._WORKFLOW_FILE.read_text(), \"raise_for_status\":", "suite.\"\"\" @pytest.fixture # type: ignore def url() -> str: \"\"\"Fake URL fixture.\"\"\" class", "\"\"\"Test `Workflow.from_file` methods.\"\"\" wf = Workflow.from_file(self._WORKFLOW_FILE) assert isinstance(wf, Workflow) assert wf.name == \"test\"", "\"workflows\" / \"hello-world.yaml\" def test_from_file(self) -> None: \"\"\"Test `Workflow.from_file` methods.\"\"\" wf = Workflow.from_file(self._WORKFLOW_FILE)", "= type( \"Response\", (), {\"text\": self._WORKFLOW_FILE.read_text(), \"raise_for_status\": lambda: None}, ) flexmock(requests).should_receive(\"get\").and_return(fake_response) wf =", "import flexmock import pytest import requests from argo.workflows.dsl import Workflow from ._base import", "from argo.workflows.dsl import Workflow from ._base import TestCase \"\"\"Workflow test suite.\"\"\" @pytest.fixture #", "pytest import requests from argo.workflows.dsl import Workflow from ._base import TestCase \"\"\"Workflow test", "assert isinstance(wf, Workflow) assert wf.name == \"test\" assert wf.kind == \"Workflow\" assert len(wf.spec.templates)", "def test_from_url(self, url: str) -> None: \"\"\"Test `Workflow.from_url` methods.\"\"\" fake_response = type( \"Response\",", "import pytest import requests from argo.workflows.dsl import Workflow from ._base import TestCase \"\"\"Workflow", "Workflow.from_file(self._WORKFLOW_FILE) assert isinstance(wf, Workflow) assert wf.name == \"test\" assert wf.kind == \"Workflow\" assert", "self._WORKFLOW_FILE.read_text(), \"raise_for_status\": lambda: None}, ) flexmock(requests).should_receive(\"get\").and_return(fake_response) wf = Workflow.from_url(url) assert isinstance(wf, Workflow) assert", "URL fixture.\"\"\" class TestWorkflow(TestCase): \"\"\"Test Workflow.\"\"\" _WORKFLOW_FILE = TestCase.DATA / \"workflows\" / \"hello-world.yaml\"", "\"Workflow\" assert len(wf.spec.templates) == 1 def test_from_url(self, url: str) -> None: \"\"\"Test `Workflow.from_url`", "test suite.\"\"\" @pytest.fixture # type: ignore def url() -> str: \"\"\"Fake URL fixture.\"\"\"", "== 1 def test_from_url(self, url: str) -> None: \"\"\"Test `Workflow.from_url` methods.\"\"\" fake_response =", "\"Response\", (), {\"text\": self._WORKFLOW_FILE.read_text(), \"raise_for_status\": lambda: None}, ) flexmock(requests).should_receive(\"get\").and_return(fake_response) wf = Workflow.from_url(url) assert", "methods.\"\"\" wf = Workflow.from_file(self._WORKFLOW_FILE) assert isinstance(wf, Workflow) assert wf.name == \"test\" assert wf.kind", "type( \"Response\", (), {\"text\": self._WORKFLOW_FILE.read_text(), \"raise_for_status\": lambda: None}, ) flexmock(requests).should_receive(\"get\").and_return(fake_response) wf = Workflow.from_url(url)", "-> None: \"\"\"Test `Workflow.from_file` methods.\"\"\" wf = Workflow.from_file(self._WORKFLOW_FILE) assert isinstance(wf, Workflow) assert wf.name", "fake_response = type( \"Response\", (), {\"text\": self._WORKFLOW_FILE.read_text(), \"raise_for_status\": lambda: None}, ) 
flexmock(requests).should_receive(\"get\").and_return(fake_response) wf", "import TestCase \"\"\"Workflow test suite.\"\"\" @pytest.fixture # type: ignore def url() -> str:", "\"\"\"Fake URL fixture.\"\"\" class TestWorkflow(TestCase): \"\"\"Test Workflow.\"\"\" _WORKFLOW_FILE = TestCase.DATA / \"workflows\" /", "assert len(wf.spec.templates) == 1 def test_from_url(self, url: str) -> None: \"\"\"Test `Workflow.from_url` methods.\"\"\"", "TestCase \"\"\"Workflow test suite.\"\"\" @pytest.fixture # type: ignore def url() -> str: \"\"\"Fake", "TestCase.DATA / \"workflows\" / \"hello-world.yaml\" def test_from_file(self) -> None: \"\"\"Test `Workflow.from_file` methods.\"\"\" wf", "\"\"\"Workflow test suite.\"\"\" @pytest.fixture # type: ignore def url() -> str: \"\"\"Fake URL", "{\"text\": self._WORKFLOW_FILE.read_text(), \"raise_for_status\": lambda: None}, ) flexmock(requests).should_receive(\"get\").and_return(fake_response) wf = Workflow.from_url(url) assert isinstance(wf, Workflow)", "\"test\" assert wf.kind == \"Workflow\" assert len(wf.spec.templates) == 1 def test_from_url(self, url: str)", "@pytest.fixture # type: ignore def url() -> str: \"\"\"Fake URL fixture.\"\"\" class TestWorkflow(TestCase):", "wf.name == \"test\" assert wf.kind == \"Workflow\" assert len(wf.spec.templates) == 1 def test_from_url(self,", "._base import TestCase \"\"\"Workflow test suite.\"\"\" @pytest.fixture # type: ignore def url() ->", "test_from_file(self) -> None: \"\"\"Test `Workflow.from_file` methods.\"\"\" wf = Workflow.from_file(self._WORKFLOW_FILE) assert isinstance(wf, Workflow) assert" ]
[ "UssdApp xfail = pytest.mark.xfail parametrize = pytest.mark.parametrize # pytestmark = pytest.mark.usefixtures(\"db\") class AppTest(object):", "pytest.mark.xfail parametrize = pytest.mark.parametrize # pytestmark = pytest.mark.usefixtures(\"db\") class AppTest(object): def test_init(self): app", "# pytestmark = pytest.mark.usefixtures(\"db\") class AppTest(object): def test_init(self): app = UssdApp('test_app') assert app.name", "import UssdApp xfail = pytest.mark.xfail parametrize = pytest.mark.parametrize # pytestmark = pytest.mark.usefixtures(\"db\") class", "pytest.mark.parametrize # pytestmark = pytest.mark.usefixtures(\"db\") class AppTest(object): def test_init(self): app = UssdApp('test_app') assert", "= pytest.mark.xfail parametrize = pytest.mark.parametrize # pytestmark = pytest.mark.usefixtures(\"db\") class AppTest(object): def test_init(self):", "flex.ussd.core import UssdApp xfail = pytest.mark.xfail parametrize = pytest.mark.parametrize # pytestmark = pytest.mark.usefixtures(\"db\")", "= pytest.mark.parametrize # pytestmark = pytest.mark.usefixtures(\"db\") class AppTest(object): def test_init(self): app = UssdApp('test_app')", "= pytest.mark.usefixtures(\"db\") class AppTest(object): def test_init(self): app = UssdApp('test_app') assert app.name == 'test_app'", "import pytest from flex.ussd.core import UssdApp xfail = pytest.mark.xfail parametrize = pytest.mark.parametrize #", "xfail = pytest.mark.xfail parametrize = pytest.mark.parametrize # pytestmark = pytest.mark.usefixtures(\"db\") class AppTest(object): def", "pytestmark = pytest.mark.usefixtures(\"db\") class AppTest(object): def test_init(self): app = UssdApp('test_app') assert app.name ==", "from flex.ussd.core import UssdApp xfail = pytest.mark.xfail parametrize = pytest.mark.parametrize # pytestmark =", "pytest from flex.ussd.core import UssdApp xfail = pytest.mark.xfail parametrize = pytest.mark.parametrize # pytestmark", "<reponame>centergy/flex_ussd import pytest from flex.ussd.core import UssdApp xfail = pytest.mark.xfail parametrize = pytest.mark.parametrize", "parametrize = pytest.mark.parametrize # pytestmark = pytest.mark.usefixtures(\"db\") class AppTest(object): def test_init(self): app =" ]
[ "linked list node\"\"\" def __init__(self, val=None): self.data = val self.next = None self.previous", "__init__(self, val=None): self.data = val self.next = None self.previous = None @property def", "self.data = val self.next = None self.previous = None @property def HasNext(self) ->", "self.previous = None @property def HasNext(self) -> bool: return self.next is not None", "= None self.previous = None @property def HasNext(self) -> bool: return self.next is", "class LinkedListNode: \"\"\"Represents a linked list node\"\"\" def __init__(self, val=None): self.data = val", "list node\"\"\" def __init__(self, val=None): self.data = val self.next = None self.previous =", "node\"\"\" def __init__(self, val=None): self.data = val self.next = None self.previous = None", "= val self.next = None self.previous = None @property def HasNext(self) -> bool:", "val self.next = None self.previous = None @property def HasNext(self) -> bool: return", "a linked list node\"\"\" def __init__(self, val=None): self.data = val self.next = None", "None self.previous = None @property def HasNext(self) -> bool: return self.next is not", "val=None): self.data = val self.next = None self.previous = None @property def HasNext(self)", "\"\"\"Represents a linked list node\"\"\" def __init__(self, val=None): self.data = val self.next =", "self.next = None self.previous = None @property def HasNext(self) -> bool: return self.next", "LinkedListNode: \"\"\"Represents a linked list node\"\"\" def __init__(self, val=None): self.data = val self.next", "def __init__(self, val=None): self.data = val self.next = None self.previous = None @property", "<reponame>nataz77/cs-py class LinkedListNode: \"\"\"Represents a linked list node\"\"\" def __init__(self, val=None): self.data =" ]
[ "= parent.timer def hover_button(self): if self.iconLabel.underMouse(): self.timer.start(10) pixmap = QtGui.QPixmap(\"images/icon_hover.svg\") self.iconLabel.setPixmap(pixmap) else: pixmap", "DeezerIcon(object): def __init__(self, parent): self.iconLabel = parent.iconLabel self.timer = parent.timer def hover_button(self): if", "import QtGui import webbrowser __author__ = 'postrowski' # -*-coding: utf-8-*- class DeezerIcon(object): def", "pixmap = QtGui.QPixmap(\"images/icon.svg\") self.iconLabel.setPixmap(pixmap) def click_button(self): if self.iconLabel.underMouse(): self.timer.start(200) pixmap = QtGui.QPixmap(\"images/icon_clicked.svg\") self.iconLabel.setPixmap(pixmap)", "self.iconLabel.setPixmap(pixmap) else: pixmap = QtGui.QPixmap(\"images/icon.svg\") self.iconLabel.setPixmap(pixmap) def click_button(self): if self.iconLabel.underMouse(): self.timer.start(200) pixmap =", "'postrowski' # -*-coding: utf-8-*- class DeezerIcon(object): def __init__(self, parent): self.iconLabel = parent.iconLabel self.timer", "self.timer.start(10) pixmap = QtGui.QPixmap(\"images/icon_hover.svg\") self.iconLabel.setPixmap(pixmap) else: pixmap = QtGui.QPixmap(\"images/icon.svg\") self.iconLabel.setPixmap(pixmap) def click_button(self): if", "def __init__(self, parent): self.iconLabel = parent.iconLabel self.timer = parent.timer def hover_button(self): if self.iconLabel.underMouse():", "QtGui import webbrowser __author__ = 'postrowski' # -*-coding: utf-8-*- class DeezerIcon(object): def __init__(self,", "utf-8-*- class DeezerIcon(object): def __init__(self, parent): self.iconLabel = parent.iconLabel self.timer = parent.timer def", "__author__ = 'postrowski' # -*-coding: utf-8-*- class DeezerIcon(object): def __init__(self, parent): self.iconLabel =", "class DeezerIcon(object): def __init__(self, parent): self.iconLabel = parent.iconLabel self.timer = parent.timer def hover_button(self):", "parent): self.iconLabel = parent.iconLabel self.timer = parent.timer def hover_button(self): if self.iconLabel.underMouse(): self.timer.start(10) pixmap", "= QtGui.QPixmap(\"images/icon.svg\") self.iconLabel.setPixmap(pixmap) def click_button(self): if self.iconLabel.underMouse(): self.timer.start(200) pixmap = QtGui.QPixmap(\"images/icon_clicked.svg\") self.iconLabel.setPixmap(pixmap) webbrowser.open(str(\"http://www.deezer.com\"),", "-*-coding: utf-8-*- class DeezerIcon(object): def __init__(self, parent): self.iconLabel = parent.iconLabel self.timer = parent.timer", "self.timer = parent.timer def hover_button(self): if self.iconLabel.underMouse(): self.timer.start(10) pixmap = QtGui.QPixmap(\"images/icon_hover.svg\") self.iconLabel.setPixmap(pixmap) else:", "= 'postrowski' # -*-coding: utf-8-*- class DeezerIcon(object): def __init__(self, parent): self.iconLabel = parent.iconLabel", "parent.iconLabel self.timer = parent.timer def hover_button(self): if self.iconLabel.underMouse(): self.timer.start(10) pixmap = QtGui.QPixmap(\"images/icon_hover.svg\") self.iconLabel.setPixmap(pixmap)", "QtGui.QPixmap(\"images/icon.svg\") self.iconLabel.setPixmap(pixmap) def click_button(self): if self.iconLabel.underMouse(): self.timer.start(200) pixmap = QtGui.QPixmap(\"images/icon_clicked.svg\") self.iconLabel.setPixmap(pixmap) webbrowser.open(str(\"http://www.deezer.com\"), new=1,", "self.timer.start(200) pixmap = QtGui.QPixmap(\"images/icon_clicked.svg\") self.iconLabel.setPixmap(pixmap) webbrowser.open(str(\"http://www.deezer.com\"), new=1, autoraise=True) else: pixmap = QtGui.QPixmap(\"images/icon.svg\") 
self.iconLabel.setPixmap(pixmap)", "import webbrowser __author__ = 'postrowski' # -*-coding: utf-8-*- class DeezerIcon(object): def __init__(self, parent):", "hover_button(self): if self.iconLabel.underMouse(): self.timer.start(10) pixmap = QtGui.QPixmap(\"images/icon_hover.svg\") self.iconLabel.setPixmap(pixmap) else: pixmap = QtGui.QPixmap(\"images/icon.svg\") self.iconLabel.setPixmap(pixmap)", "self.iconLabel.underMouse(): self.timer.start(200) pixmap = QtGui.QPixmap(\"images/icon_clicked.svg\") self.iconLabel.setPixmap(pixmap) webbrowser.open(str(\"http://www.deezer.com\"), new=1, autoraise=True) else: pixmap = QtGui.QPixmap(\"images/icon.svg\")", "__init__(self, parent): self.iconLabel = parent.iconLabel self.timer = parent.timer def hover_button(self): if self.iconLabel.underMouse(): self.timer.start(10)", "pixmap = QtGui.QPixmap(\"images/icon_hover.svg\") self.iconLabel.setPixmap(pixmap) else: pixmap = QtGui.QPixmap(\"images/icon.svg\") self.iconLabel.setPixmap(pixmap) def click_button(self): if self.iconLabel.underMouse():", "webbrowser __author__ = 'postrowski' # -*-coding: utf-8-*- class DeezerIcon(object): def __init__(self, parent): self.iconLabel", "def click_button(self): if self.iconLabel.underMouse(): self.timer.start(200) pixmap = QtGui.QPixmap(\"images/icon_clicked.svg\") self.iconLabel.setPixmap(pixmap) webbrowser.open(str(\"http://www.deezer.com\"), new=1, autoraise=True) else:", "QtGui.QPixmap(\"images/icon_hover.svg\") self.iconLabel.setPixmap(pixmap) else: pixmap = QtGui.QPixmap(\"images/icon.svg\") self.iconLabel.setPixmap(pixmap) def click_button(self): if self.iconLabel.underMouse(): self.timer.start(200) pixmap", "if self.iconLabel.underMouse(): self.timer.start(200) pixmap = QtGui.QPixmap(\"images/icon_clicked.svg\") self.iconLabel.setPixmap(pixmap) webbrowser.open(str(\"http://www.deezer.com\"), new=1, autoraise=True) else: pixmap =", "# -*-coding: utf-8-*- class DeezerIcon(object): def __init__(self, parent): self.iconLabel = parent.iconLabel self.timer =", "def hover_button(self): if self.iconLabel.underMouse(): self.timer.start(10) pixmap = QtGui.QPixmap(\"images/icon_hover.svg\") self.iconLabel.setPixmap(pixmap) else: pixmap = QtGui.QPixmap(\"images/icon.svg\")", "= parent.iconLabel self.timer = parent.timer def hover_button(self): if self.iconLabel.underMouse(): self.timer.start(10) pixmap = QtGui.QPixmap(\"images/icon_hover.svg\")", "if self.iconLabel.underMouse(): self.timer.start(10) pixmap = QtGui.QPixmap(\"images/icon_hover.svg\") self.iconLabel.setPixmap(pixmap) else: pixmap = QtGui.QPixmap(\"images/icon.svg\") self.iconLabel.setPixmap(pixmap) def", "click_button(self): if self.iconLabel.underMouse(): self.timer.start(200) pixmap = QtGui.QPixmap(\"images/icon_clicked.svg\") self.iconLabel.setPixmap(pixmap) webbrowser.open(str(\"http://www.deezer.com\"), new=1, autoraise=True) else: pixmap", "self.iconLabel.setPixmap(pixmap) def click_button(self): if self.iconLabel.underMouse(): self.timer.start(200) pixmap = QtGui.QPixmap(\"images/icon_clicked.svg\") self.iconLabel.setPixmap(pixmap) webbrowser.open(str(\"http://www.deezer.com\"), new=1, autoraise=True)", "from PyQt4 import QtGui import webbrowser __author__ = 'postrowski' # -*-coding: utf-8-*- class", "PyQt4 import QtGui import webbrowser __author__ = 'postrowski' # -*-coding: utf-8-*- class DeezerIcon(object):", "self.iconLabel.underMouse(): self.timer.start(10) pixmap = QtGui.QPixmap(\"images/icon_hover.svg\") self.iconLabel.setPixmap(pixmap) else: pixmap = QtGui.QPixmap(\"images/icon.svg\") 
self.iconLabel.setPixmap(pixmap) def click_button(self):", "= QtGui.QPixmap(\"images/icon_hover.svg\") self.iconLabel.setPixmap(pixmap) else: pixmap = QtGui.QPixmap(\"images/icon.svg\") self.iconLabel.setPixmap(pixmap) def click_button(self): if self.iconLabel.underMouse(): self.timer.start(200)", "else: pixmap = QtGui.QPixmap(\"images/icon.svg\") self.iconLabel.setPixmap(pixmap) def click_button(self): if self.iconLabel.underMouse(): self.timer.start(200) pixmap = QtGui.QPixmap(\"images/icon_clicked.svg\")", "parent.timer def hover_button(self): if self.iconLabel.underMouse(): self.timer.start(10) pixmap = QtGui.QPixmap(\"images/icon_hover.svg\") self.iconLabel.setPixmap(pixmap) else: pixmap =", "self.iconLabel = parent.iconLabel self.timer = parent.timer def hover_button(self): if self.iconLabel.underMouse(): self.timer.start(10) pixmap =" ]
[ "import FileData import pandas as pd import numpy as np file_data = FileData(\"F:\\\\Python", "import numpy as np file_data = FileData(\"F:\\\\Python Projects\\\\170622_MDS.txt\") print(file_data.df) file_data.df.fillna(0) print(file_data.df) df =", "FileData(\"F:\\\\Python Projects\\\\170622_MDS.txt\") print(file_data.df) file_data.df.fillna(0) print(file_data.df) df = pd.DataFrame([[np.nan, 2, np.nan, 0], [3, 4,", "[3, 4, np.nan, 1], [np.nan, np.nan, np.nan, 5], [np.nan, 3, np.nan, 4]], columns=list('ABCD'))", "print(file_data.df) df = pd.DataFrame([[np.nan, 2, np.nan, 0], [3, 4, np.nan, 1], [np.nan, np.nan,", "FileData import pandas as pd import numpy as np file_data = FileData(\"F:\\\\Python Projects\\\\170622_MDS.txt\")", "pd.DataFrame([[np.nan, 2, np.nan, 0], [3, 4, np.nan, 1], [np.nan, np.nan, np.nan, 5], [np.nan,", "as pd import numpy as np file_data = FileData(\"F:\\\\Python Projects\\\\170622_MDS.txt\") print(file_data.df) file_data.df.fillna(0) print(file_data.df)", "np.nan, 5], [np.nan, 3, np.nan, 4]], columns=list('ABCD')) print(df) df.fillna(0, inplace = True) print(df)", "4, np.nan, 1], [np.nan, np.nan, np.nan, 5], [np.nan, 3, np.nan, 4]], columns=list('ABCD')) print(df)", "as np file_data = FileData(\"F:\\\\Python Projects\\\\170622_MDS.txt\") print(file_data.df) file_data.df.fillna(0) print(file_data.df) df = pd.DataFrame([[np.nan, 2,", "numpy as np file_data = FileData(\"F:\\\\Python Projects\\\\170622_MDS.txt\") print(file_data.df) file_data.df.fillna(0) print(file_data.df) df = pd.DataFrame([[np.nan,", "np.nan, 0], [3, 4, np.nan, 1], [np.nan, np.nan, np.nan, 5], [np.nan, 3, np.nan,", "= FileData(\"F:\\\\Python Projects\\\\170622_MDS.txt\") print(file_data.df) file_data.df.fillna(0) print(file_data.df) df = pd.DataFrame([[np.nan, 2, np.nan, 0], [3,", "pd import numpy as np file_data = FileData(\"F:\\\\Python Projects\\\\170622_MDS.txt\") print(file_data.df) file_data.df.fillna(0) print(file_data.df) df", "file_data = FileData(\"F:\\\\Python Projects\\\\170622_MDS.txt\") print(file_data.df) file_data.df.fillna(0) print(file_data.df) df = pd.DataFrame([[np.nan, 2, np.nan, 0],", "np.nan, np.nan, 5], [np.nan, 3, np.nan, 4]], columns=list('ABCD')) print(df) df.fillna(0, inplace = True)", "Projects\\\\170622_MDS.txt\") print(file_data.df) file_data.df.fillna(0) print(file_data.df) df = pd.DataFrame([[np.nan, 2, np.nan, 0], [3, 4, np.nan,", "pandas as pd import numpy as np file_data = FileData(\"F:\\\\Python Projects\\\\170622_MDS.txt\") print(file_data.df) file_data.df.fillna(0)", "np.nan, 1], [np.nan, np.nan, np.nan, 5], [np.nan, 3, np.nan, 4]], columns=list('ABCD')) print(df) df.fillna(0,", "print(file_data.df) file_data.df.fillna(0) print(file_data.df) df = pd.DataFrame([[np.nan, 2, np.nan, 0], [3, 4, np.nan, 1],", "file_data.df.fillna(0) print(file_data.df) df = pd.DataFrame([[np.nan, 2, np.nan, 0], [3, 4, np.nan, 1], [np.nan,", "FileData import FileData import pandas as pd import numpy as np file_data =", "np file_data = FileData(\"F:\\\\Python Projects\\\\170622_MDS.txt\") print(file_data.df) file_data.df.fillna(0) print(file_data.df) df = pd.DataFrame([[np.nan, 2, np.nan,", "= pd.DataFrame([[np.nan, 2, np.nan, 0], [3, 4, np.nan, 1], [np.nan, np.nan, np.nan, 5],", "import pandas as pd import numpy as np file_data = FileData(\"F:\\\\Python Projects\\\\170622_MDS.txt\") print(file_data.df)", "1], [np.nan, np.nan, np.nan, 5], [np.nan, 3, np.nan, 4]], columns=list('ABCD')) print(df) df.fillna(0, inplace", "[np.nan, np.nan, np.nan, 5], [np.nan, 3, np.nan, 4]], 
columns=list('ABCD')) print(df) df.fillna(0, inplace =", "0], [3, 4, np.nan, 1], [np.nan, np.nan, np.nan, 5], [np.nan, 3, np.nan, 4]],", "df = pd.DataFrame([[np.nan, 2, np.nan, 0], [3, 4, np.nan, 1], [np.nan, np.nan, np.nan,", "from FileData import FileData import pandas as pd import numpy as np file_data", "2, np.nan, 0], [3, 4, np.nan, 1], [np.nan, np.nan, np.nan, 5], [np.nan, 3," ]
[]
[ "place = x.index(i, k) + 1 position.append(place) k = place print(\"\\nTodas as notas:\",", "position.append(place) k = place media = media / 8 print(\"Média das idades cadastradas:\",", "nota de 10 alunos de uma turma e guarde-as em um vetor. No", "= value # alterando valor de j para que a função index conte", "x) maiorIdade = max(x) j = 0 k = 0 for i in", "i == maiorNota: place = x.index(i, k) + 1 position.append(place) k = place", "table = [] for i in range(9): nomes.append(input(\"Digite o seu nome: \")) idades.append(int(input(\"Digite", "\"))) print(\"\\nIdades inseridas:\", x) maiorIdade = max(x) j = 0 k = 0", "/ 10 maiorNota = max(x) excel = 0 k = 0 position =", "maior nota aparece:\", position) ''' 83) [DESAFIO] Crie uma lógica que preencha um", "vetor de 20 posições com números aleatórios (entre 0 e 99) gerados pelo", "contendo apenas os dados das pessoas menores de idade. ''' print(\"\\nQuestão 84\\n\") #", "= x.index(i, k) + 1 position.append(place) k = place media = media /", "== \"M\": break else: print(\"Você precisa escolher F para Feminino ou M para", "valores > 25 position25.append(value) # add posição na lista j = value #", "j para que a função index conte a partir da posição seguinte if", "um algoritmo que leia o nome, o sexo e o salário de 5", "precisa escolher F para Feminino ou M para Masculino. Tente de novo!\") choice", "um vetor. No final, mostre: a) Qual é a média de idade das", "maior idade:\", position) ''' 82) Faça um algoritmo que leia a nota de", "da média da turma c) Qual foi a maior nota digitada d) Em", "= x.index(i, j) + 1 # posição de valores > 25 position25.append(value) #", "> media: excel += 1 if i == maiorNota: place = x.index(i, k)", "0 position = [] position25 = [] for i in range(8): x.append(int(input(\"Digite sua", "\")) idades.append(int(input(\"Digite a sua idade: \"))) if idades[i] < 18: table.append([nomes[i], idades[i]]) if", "anos:\", position25) print(\"Maior idade digitada:\", maiorIdade) print(\"Posições com a maior idade:\", position) '''", "de 20 posições com números aleatórios (entre 0 e 99) gerados pelo computador.", "de novo!\") choice = input(\"Qual o seu gênero? [F/M] \") return choice print(\"\\nQuestão", "max(x) excel = 0 k = 0 position = [] for i in", "i in range(20): vetor.append(random.randint(0, 99)) print(\"Números gerados:\", vetor) print(\"Números ordenados:\", sorted(vetor)) ''' 84)", "85\\n\") nome = [] genero = [] salario = [] table = []", "# posição de valores > 25 position25.append(value) # add posição na lista j", "0 for i in range(10): nota = float(input(\"Qual a nota do aluno? \"))", "o nome e a idade de 9 pessoas e guarde esses valores em", "if choice == \"F\" or choice == \"M\": break else: print(\"Você precisa escolher", "or choice == \"M\": break else: print(\"Você precisa escolher F para Feminino ou", "uma listagem contendo apenas os dados das pessoas menores de idade. ''' print(\"\\nQuestão", "ganham mais de R$5 mil. 
''' # Testando se o usuário digitou a", "position = [] position25 = [] for i in range(8): x.append(int(input(\"Digite sua idade:", "sorted(vetor)) ''' 84) Crie um programa que leia o nome e a idade", "turma c) Qual foi a maior nota digitada d) Em que posições a", "j) + 1 # posição de valores > 25 position25.append(value) # add posição", "media = media / 10 maiorNota = max(x) excel = 0 k =", "a média de idade das pessoas cadastradas b) Em quais posições temos pessoas", "ser encontrados no site https://www.cursoemvideo.com/wp-content/uploads/2019/08/exercicios-algoritmos.pdf 81) Crie um programa que leia a idade", "[] for i in range(8): x.append(int(input(\"Digite sua idade: \"))) print(\"\\nIdades inseridas:\", x) maiorIdade", "1 # posição de valores > 25 position25.append(value) # add posição na lista", "''' print(\"\\nQuestão 82\\n\") x = [] media = 0 for i in range(10):", "acima de 25 anos:\", position25) print(\"Maior idade digitada:\", maiorIdade) print(\"Posições com a maior", "guarde esses dados em três vetores. No final, mostre uma listagem contendo apenas", "= float(input(\"Qual a nota do aluno? \")) x.append(nota) media += nota media =", "> 25 position25.append(value) # add posição na lista j = value # alterando", "posição na lista j = value # alterando valor de j para que", "idade. ''' print(\"\\nQuestão 84\\n\") # https://stackoverflow.com/questions/8356501/python-format-tabular-output from tabulate import tabulate nomes = []", "vetor. No final, mostre: a) Qual é a média da turma b) Quantos", "acima da média:\", excel) print(\"Maior nota:\", maiorNota) print(\"Posições em que a maior nota", "de idade das pessoas cadastradas b) Em quais posições temos pessoas com mais", "25 position25.append(value) # add posição na lista j = value # alterando valor", "funcionárias mulheres que ganham mais de R$5 mil. ''' # Testando se o", "a partir da posição seguinte if i == maiorIdade: place = x.index(i, k)", "''' Estes exercícios fazem parte do curso de Introdução a Algoritmos, ministrado pelo", "listagem contendo apenas os dados das pessoas menores de idade. ''' print(\"\\nQuestão 84\\n\")", "maior idade digitada (podem haver repetições) d) Em que posições digitamos a maior", "x.index(i, k) + 1 position.append(place) k = place media = media / 8", "table != []: print(\"\\nPessoas menores de idade:\") print(tabulate(table)) ''' 85) Faça um algoritmo", "que posições a maior nota aparece ''' print(\"\\nQuestão 82\\n\") x = [] media", "table.append([nome[i], genero[i], \"R$\" + str(round(salario[i], 2))]) if table != []: print(\"\\nNome | Gênero", "dados das funcionárias mulheres que ganham mais de R$5 mil. ''' # Testando", "aparece ''' print(\"\\nQuestão 82\\n\") x = [] media = 0 for i in", "idade de 9 pessoas e guarde esses valores em dois vetores, em posições", "média de idade das pessoas cadastradas b) Em quais posições temos pessoas com", "range(8): x.append(int(input(\"Digite sua idade: \"))) print(\"\\nIdades inseridas:\", x) maiorIdade = max(x) j =", "valores em dois vetores, em posições relacionadas. No final, mostre uma listagem contendo", "place media = media / 8 print(\"Média das idades cadastradas:\", media) print(\"Posições com", "range(9): nomes.append(input(\"Digite o seu nome: \")) idades.append(int(input(\"Digite a sua idade: \"))) if idades[i]", "maiorNota = max(x) excel = 0 k = 0 position = [] for", "[F/M] \") resposta = test(resposta) genero.append(resposta) salario.append(float(input(\"Qual o seu salário? 
R$\"))) if genero[i]", "genero.append(resposta) salario.append(float(input(\"Qual o seu salário? R$\"))) if genero[i] == \"F\" and salario[i] >", "= [] salario = [] table = [] for i in range(5): nome.append(input(\"Digite", "o sexo e o salário de 5 funcionários e guarde esses dados em", "os números gerados e depois coloque o vetor em ordem crescente, mostrando no", "!= []: print(\"\\nPessoas menores de idade:\") print(tabulate(table)) ''' 85) Faça um algoritmo que", "5 funcionários e guarde esses dados em três vetores. No final, mostre uma", "0 position = [] for i in x: if i > media: excel", "com números aleatórios (entre 0 e 99) gerados pelo computador. Logo em seguida,", "a maior nota aparece ''' print(\"\\nQuestão 82\\n\") x = [] media = 0", "leia a idade de 8 pessoas e guarde-as em um vetor. No final,", "o vetor em ordem crescente, mostrando no final os valores ordenados. ''' print(\"\\nQuestão", "\"R$\" + str(round(salario[i], 2))]) if table != []: print(\"\\nNome | Gênero | Salário\")", "partir da posição seguinte if i == maiorIdade: place = x.index(i, k) +", "81\\n\") x = [] media = 0 position = [] position25 = []", "listagem contendo apenas os dados das funcionárias mulheres que ganham mais de R$5", "seu salário? R$\"))) if genero[i] == \"F\" and salario[i] > 5000: table.append([nome[i], genero[i],", "84) Crie um programa que leia o nome e a idade de 9", "range(5): nome.append(input(\"Digite o seu nome: \")) resposta = input(\"Qual o seu gênero? [F/M]", "e depois coloque o vetor em ordem crescente, mostrando no final os valores", "idades[i] < 18: table.append([nomes[i], idades[i]]) if table != []: print(\"\\nPessoas menores de idade:\")", "= [] media = 0 for i in range(10): nota = float(input(\"Qual a", "+= nota media = media / 10 maiorNota = max(x) excel = 0", "a idade de 8 pessoas e guarde-as em um vetor. No final, mostre:", "guarde-as em um vetor. No final, mostre: a) Qual é a média de", "print(tabulate(table)) ''' 85) Faça um algoritmo que leia o nome, o sexo e", "\") resposta = test(resposta) genero.append(resposta) salario.append(float(input(\"Qual o seu salário? R$\"))) if genero[i] ==", "Qual é a média de idade das pessoas cadastradas b) Em quais posições", "pelo prof. <NAME> e podem ser encontrados no site https://www.cursoemvideo.com/wp-content/uploads/2019/08/exercicios-algoritmos.pdf 81) Crie um", "Crie uma lógica que preencha um vetor de 20 posições com números aleatórios", "salário? R$\"))) if genero[i] == \"F\" and salario[i] > 5000: table.append([nome[i], genero[i], \"R$\"", "j = 0 k = 0 for i in x: media += i", "das idades cadastradas:\", media) print(\"Posições com idades acima de 25 anos:\", position25) print(\"Maior", "= [] for i in x: if i > media: excel += 1", "vetor = [] for i in range(20): vetor.append(random.randint(0, 99)) print(\"Números gerados:\", vetor) print(\"Números", "no final os valores ordenados. ''' print(\"\\nQuestão 83\\n\") import random vetor = []", "(entre 0 e 99) gerados pelo computador. Logo em seguida, mostre os números", "curso de Introdução a Algoritmos, ministrado pelo prof. <NAME> e podem ser encontrados", "idades acima de 25 anos:\", position25) print(\"Maior idade digitada:\", maiorIdade) print(\"Posições com a", "media / 8 print(\"Média das idades cadastradas:\", media) print(\"Posições com idades acima de", "k = 0 position = [] for i in x: if i >", "posições a maior nota aparece ''' print(\"\\nQuestão 82\\n\") x = [] media =", "[] table = [] for i in range(5): nome.append(input(\"Digite o seu nome: \"))", "Masculino. 
Tente de novo!\") choice = input(\"Qual o seu gênero? [F/M] \") return", "Faça um algoritmo que leia o nome, o sexo e o salário de", "alunos de uma turma e guarde-as em um vetor. No final, mostre: a)", "Feminino ou M para Masculino. Tente de novo!\") choice = input(\"Qual o seu", "No final, mostre: a) Qual é a média da turma b) Quantos alunos", "for i in range(9): nomes.append(input(\"Digite o seu nome: \")) idades.append(int(input(\"Digite a sua idade:", "print(\"Maior nota:\", maiorNota) print(\"Posições em que a maior nota aparece:\", position) ''' 83)", "sua idade: \"))) if idades[i] < 18: table.append([nomes[i], idades[i]]) if table != []:", "ou M para Masculino. Tente de novo!\") choice = input(\"Qual o seu gênero?", "place = x.index(i, k) + 1 position.append(place) k = place media = media", "\")) resposta = input(\"Qual o seu gênero? [F/M] \") resposta = test(resposta) genero.append(resposta)", "digitamos a maior idade ''' print(\"Questão 81\\n\") x = [] media = 0", "e guarde-as em um vetor. No final, mostre: a) Qual é a média", "Algoritmos, ministrado pelo prof. <NAME> e podem ser encontrados no site https://www.cursoemvideo.com/wp-content/uploads/2019/08/exercicios-algoritmos.pdf 81)", "Em quais posições temos pessoas com mais de 25 anos c) Qual foi", "''' 83) [DESAFIO] Crie uma lógica que preencha um vetor de 20 posições", "algoritmo que leia o nome, o sexo e o salário de 5 funcionários", "5000: table.append([nome[i], genero[i], \"R$\" + str(round(salario[i], 2))]) if table != []: print(\"\\nNome |", "a nota do aluno? \")) x.append(nota) media += nota media = media /", "in range(5): nome.append(input(\"Digite o seu nome: \")) resposta = input(\"Qual o seu gênero?", "aluno? \")) x.append(nota) media += nota media = media / 10 maiorNota =", "[] for i in x: if i > media: excel += 1 if", "i == maiorIdade: place = x.index(i, k) + 1 position.append(place) k = place", "''' print(\"\\nQuestão 84\\n\") # https://stackoverflow.com/questions/8356501/python-format-tabular-output from tabulate import tabulate nomes = [] idades", "no site https://www.cursoemvideo.com/wp-content/uploads/2019/08/exercicios-algoritmos.pdf 81) Crie um programa que leia a idade de 8", "i > 25: value = x.index(i, j) + 1 # posição de valores", "que posições digitamos a maior idade ''' print(\"Questão 81\\n\") x = [] media", "programa que leia a idade de 8 pessoas e guarde-as em um vetor.", "random vetor = [] for i in range(20): vetor.append(random.randint(0, 99)) print(\"Números gerados:\", vetor)", "resposta = input(\"Qual o seu gênero? [F/M] \") resposta = test(resposta) genero.append(resposta) salario.append(float(input(\"Qual", "em ordem crescente, mostrando no final os valores ordenados. ''' print(\"\\nQuestão 83\\n\") import", "mostre: a) Qual é a média da turma b) Quantos alunos estão acima", "''' # Testando se o usuário digitou a letra correta def test(choice): while", "x) print(\"Média da turma:\", round(media, 2)) print(\"Qtd de alunos acima da média:\", excel)", "e 99) gerados pelo computador. Logo em seguida, mostre os números gerados e", "+ 1 # posição de valores > 25 position25.append(value) # add posição na", "que leia a idade de 8 pessoas e guarde-as em um vetor. No", "tabulate import tabulate nomes = [] idades = [] table = [] for", "gênero? [F/M] \") return choice print(\"\\nQuestão 85\\n\") nome = [] genero = []", "conte a partir da posição seguinte if i == maiorIdade: place = x.index(i,", "99) gerados pelo computador. 
Logo em seguida, mostre os números gerados e depois", "10 alunos de uma turma e guarde-as em um vetor. No final, mostre:", "a maior idade digitada (podem haver repetições) d) Em que posições digitamos a", "alunos estão acima da média da turma c) Qual foi a maior nota", "in range(20): vetor.append(random.randint(0, 99)) print(\"Números gerados:\", vetor) print(\"Números ordenados:\", sorted(vetor)) ''' 84) Crie", "media / 10 maiorNota = max(x) excel = 0 k = 0 position", "de R$5 mil. ''' # Testando se o usuário digitou a letra correta", "lista j = value # alterando valor de j para que a função", "0 k = 0 position = [] for i in x: if i", "# add posição na lista j = value # alterando valor de j", "exercícios fazem parte do curso de Introdução a Algoritmos, ministrado pelo prof. <NAME>", "82) Faça um algoritmo que leia a nota de 10 alunos de uma", "= 0 for i in x: media += i if i > 25:", "se o usuário digitou a letra correta def test(choice): while True: if choice", "e a idade de 9 pessoas e guarde esses valores em dois vetores,", "novo!\") choice = input(\"Qual o seu gênero? [F/M] \") return choice print(\"\\nQuestão 85\\n\")", "< 18: table.append([nomes[i], idades[i]]) if table != []: print(\"\\nPessoas menores de idade:\") print(tabulate(table))", "usuário digitou a letra correta def test(choice): while True: if choice == \"F\"", "print(\"Números ordenados:\", sorted(vetor)) ''' 84) Crie um programa que leia o nome e", "[] media = 0 for i in range(10): nota = float(input(\"Qual a nota", "e guarde esses valores em dois vetores, em posições relacionadas. No final, mostre", "nome.append(input(\"Digite o seu nome: \")) resposta = input(\"Qual o seu gênero? [F/M] \")", "else: print(\"Você precisa escolher F para Feminino ou M para Masculino. Tente de", "25: value = x.index(i, j) + 1 # posição de valores > 25", "= max(x) j = 0 k = 0 for i in x: media", "que preencha um vetor de 20 posições com números aleatórios (entre 0 e", "Introdução a Algoritmos, ministrado pelo prof. <NAME> e podem ser encontrados no site", "resposta = test(resposta) genero.append(resposta) salario.append(float(input(\"Qual o seu salário? R$\"))) if genero[i] == \"F\"", "= place print(\"\\nTodas as notas:\", x) print(\"Média da turma:\", round(media, 2)) print(\"Qtd de", "aleatórios (entre 0 e 99) gerados pelo computador. Logo em seguida, mostre os", "for i in range(5): nome.append(input(\"Digite o seu nome: \")) resposta = input(\"Qual o", "é a média de idade das pessoas cadastradas b) Em quais posições temos", "um algoritmo que leia a nota de 10 alunos de uma turma e", "max(x) j = 0 k = 0 for i in x: media +=", "excel = 0 k = 0 position = [] for i in x:", "position25 = [] for i in range(8): x.append(int(input(\"Digite sua idade: \"))) print(\"\\nIdades inseridas:\",", "= [] position25 = [] for i in range(8): x.append(int(input(\"Digite sua idade: \")))", "= media / 8 print(\"Média das idades cadastradas:\", media) print(\"Posições com idades acima", "[] idades = [] table = [] for i in range(9): nomes.append(input(\"Digite o", "in range(10): nota = float(input(\"Qual a nota do aluno? \")) x.append(nota) media +=", "\"))) if idades[i] < 18: table.append([nomes[i], idades[i]]) if table != []: print(\"\\nPessoas menores", "a) Qual é a média da turma b) Quantos alunos estão acima da", "em um vetor. 
No final, mostre: a) Qual é a média da turma", "c) Qual foi a maior idade digitada (podem haver repetições) d) Em que", "sexo e o salário de 5 funcionários e guarde esses dados em três", "a maior nota digitada d) Em que posições a maior nota aparece '''", "funcionários e guarde esses dados em três vetores. No final, mostre uma listagem", "que a maior nota aparece:\", position) ''' 83) [DESAFIO] Crie uma lógica que", "import tabulate nomes = [] idades = [] table = [] for i", "return choice print(\"\\nQuestão 85\\n\") nome = [] genero = [] salario = []", "para que a função index conte a partir da posição seguinte if i", "99)) print(\"Números gerados:\", vetor) print(\"Números ordenados:\", sorted(vetor)) ''' 84) Crie um programa que", "[] for i in range(9): nomes.append(input(\"Digite o seu nome: \")) idades.append(int(input(\"Digite a sua", "ordenados. ''' print(\"\\nQuestão 83\\n\") import random vetor = [] for i in range(20):", "digitou a letra correta def test(choice): while True: if choice == \"F\" or", "= test(resposta) genero.append(resposta) salario.append(float(input(\"Qual o seu salário? R$\"))) if genero[i] == \"F\" and", "letra correta def test(choice): while True: if choice == \"F\" or choice ==", "genero[i] == \"F\" and salario[i] > 5000: table.append([nome[i], genero[i], \"R$\" + str(round(salario[i], 2))])", "a maior idade ''' print(\"Questão 81\\n\") x = [] media = 0 position", "em um vetor. No final, mostre: a) Qual é a média de idade", "= input(\"Qual o seu gênero? [F/M] \") return choice print(\"\\nQuestão 85\\n\") nome =", "a maior idade:\", position) ''' 82) Faça um algoritmo que leia a nota", "encontrados no site https://www.cursoemvideo.com/wp-content/uploads/2019/08/exercicios-algoritmos.pdf 81) Crie um programa que leia a idade de", "idades.append(int(input(\"Digite a sua idade: \"))) if idades[i] < 18: table.append([nomes[i], idades[i]]) if table", "round(media, 2)) print(\"Qtd de alunos acima da média:\", excel) print(\"Maior nota:\", maiorNota) print(\"Posições", "digitada d) Em que posições a maior nota aparece ''' print(\"\\nQuestão 82\\n\") x", "o nome, o sexo e o salário de 5 funcionários e guarde esses", "R$\"))) if genero[i] == \"F\" and salario[i] > 5000: table.append([nome[i], genero[i], \"R$\" +", "temos pessoas com mais de 25 anos c) Qual foi a maior idade", "if i > 25: value = x.index(i, j) + 1 # posição de", "nota digitada d) Em que posições a maior nota aparece ''' print(\"\\nQuestão 82\\n\")", "menores de idade. ''' print(\"\\nQuestão 84\\n\") # https://stackoverflow.com/questions/8356501/python-format-tabular-output from tabulate import tabulate nomes", "de Introdução a Algoritmos, ministrado pelo prof. <NAME> e podem ser encontrados no", "crescente, mostrando no final os valores ordenados. ''' print(\"\\nQuestão 83\\n\") import random vetor", "maiorIdade) print(\"Posições com a maior idade:\", position) ''' 82) Faça um algoritmo que", "d) Em que posições a maior nota aparece ''' print(\"\\nQuestão 82\\n\") x =", "[]: print(\"\\nPessoas menores de idade:\") print(tabulate(table)) ''' 85) Faça um algoritmo que leia", "18: table.append([nomes[i], idades[i]]) if table != []: print(\"\\nPessoas menores de idade:\") print(tabulate(table)) '''", "dados em três vetores. No final, mostre uma listagem contendo apenas os dados", "de valores > 25 position25.append(value) # add posição na lista j = value", "1 position.append(place) k = place print(\"\\nTodas as notas:\", x) print(\"Média da turma:\", round(media,", "float(input(\"Qual a nota do aluno? 
\")) x.append(nota) media += nota media = media", "position) ''' 83) [DESAFIO] Crie uma lógica que preencha um vetor de 20", "função index conte a partir da posição seguinte if i == maiorIdade: place", "media = media / 8 print(\"Média das idades cadastradas:\", media) print(\"Posições com idades", "for i in x: if i > media: excel += 1 if i", "algoritmo que leia a nota de 10 alunos de uma turma e guarde-as", "média da turma b) Quantos alunos estão acima da média da turma c)", "print(\"Você precisa escolher F para Feminino ou M para Masculino. Tente de novo!\")", "o salário de 5 funcionários e guarde esses dados em três vetores. No", "nota media = media / 10 maiorNota = max(x) excel = 0 k", "que leia o nome e a idade de 9 pessoas e guarde esses", "in x: media += i if i > 25: value = x.index(i, j)", "gerados:\", vetor) print(\"Números ordenados:\", sorted(vetor)) ''' 84) Crie um programa que leia o", "final os valores ordenados. ''' print(\"\\nQuestão 83\\n\") import random vetor = [] for", "seu gênero? [F/M] \") return choice print(\"\\nQuestão 85\\n\") nome = [] genero =", "position25) print(\"Maior idade digitada:\", maiorIdade) print(\"Posições com a maior idade:\", position) ''' 82)", "range(10): nota = float(input(\"Qual a nota do aluno? \")) x.append(nota) media += nota", "= place media = media / 8 print(\"Média das idades cadastradas:\", media) print(\"Posições", "= [] idades = [] table = [] for i in range(9): nomes.append(input(\"Digite", "in range(9): nomes.append(input(\"Digite o seu nome: \")) idades.append(int(input(\"Digite a sua idade: \"))) if", "excel += 1 if i == maiorNota: place = x.index(i, k) + 1", "[DESAFIO] Crie uma lógica que preencha um vetor de 20 posições com números", "turma e guarde-as em um vetor. No final, mostre: a) Qual é a", "maior nota digitada d) Em que posições a maior nota aparece ''' print(\"\\nQuestão", "alunos acima da média:\", excel) print(\"Maior nota:\", maiorNota) print(\"Posições em que a maior", "e guarde esses dados em três vetores. No final, mostre uma listagem contendo", "foi a maior nota digitada d) Em que posições a maior nota aparece", "maior nota aparece ''' print(\"\\nQuestão 82\\n\") x = [] media = 0 for", "10 maiorNota = max(x) excel = 0 k = 0 position = []", "if i == maiorIdade: place = x.index(i, k) + 1 position.append(place) k =", "if idades[i] < 18: table.append([nomes[i], idades[i]]) if table != []: print(\"\\nPessoas menores de", "and salario[i] > 5000: table.append([nome[i], genero[i], \"R$\" + str(round(salario[i], 2))]) if table !=", "a maior nota aparece:\", position) ''' 83) [DESAFIO] Crie uma lógica que preencha", "sua idade: \"))) print(\"\\nIdades inseridas:\", x) maiorIdade = max(x) j = 0 k", "b) Em quais posições temos pessoas com mais de 25 anos c) Qual", "print(\"Questão 81\\n\") x = [] media = 0 position = [] position25 =", "gerados e depois coloque o vetor em ordem crescente, mostrando no final os", "83) [DESAFIO] Crie uma lógica que preencha um vetor de 20 posições com", "choice = input(\"Qual o seu gênero? [F/M] \") return choice print(\"\\nQuestão 85\\n\") nome", "for i in range(20): vetor.append(random.randint(0, 99)) print(\"Números gerados:\", vetor) print(\"Números ordenados:\", sorted(vetor)) '''", "nota = float(input(\"Qual a nota do aluno? \")) x.append(nota) media += nota media", "''' 84) Crie um programa que leia o nome e a idade de", "apenas os dados das funcionárias mulheres que ganham mais de R$5 mil. 
'''", "\"F\" and salario[i] > 5000: table.append([nome[i], genero[i], \"R$\" + str(round(salario[i], 2))]) if table", "= [] genero = [] salario = [] table = [] for i", "salario[i] > 5000: table.append([nome[i], genero[i], \"R$\" + str(round(salario[i], 2))]) if table != []:", "seu nome: \")) idades.append(int(input(\"Digite a sua idade: \"))) if idades[i] < 18: table.append([nomes[i],", "print(\"Números gerados:\", vetor) print(\"Números ordenados:\", sorted(vetor)) ''' 84) Crie um programa que leia", "estão acima da média da turma c) Qual foi a maior nota digitada", "preencha um vetor de 20 posições com números aleatórios (entre 0 e 99)", "20 posições com números aleatórios (entre 0 e 99) gerados pelo computador. Logo", "[] for i in range(5): nome.append(input(\"Digite o seu nome: \")) resposta = input(\"Qual", "em seguida, mostre os números gerados e depois coloque o vetor em ordem", "computador. Logo em seguida, mostre os números gerados e depois coloque o vetor", "+ str(round(salario[i], 2))]) if table != []: print(\"\\nNome | Gênero | Salário\") print(tabulate(table))", "correta def test(choice): while True: if choice == \"F\" or choice == \"M\":", "esses valores em dois vetores, em posições relacionadas. No final, mostre uma listagem", "= x.index(i, k) + 1 position.append(place) k = place print(\"\\nTodas as notas:\", x)", "print(\"\\nQuestão 82\\n\") x = [] media = 0 for i in range(10): nota", "contendo apenas os dados das funcionárias mulheres que ganham mais de R$5 mil.", "k) + 1 position.append(place) k = place media = media / 8 print(\"Média", "a) Qual é a média de idade das pessoas cadastradas b) Em quais", "idade de 8 pessoas e guarde-as em um vetor. No final, mostre: a)", "k = 0 for i in x: media += i if i >", "x = [] media = 0 position = [] position25 = [] for", "da turma:\", round(media, 2)) print(\"Qtd de alunos acima da média:\", excel) print(\"Maior nota:\",", "um programa que leia o nome e a idade de 9 pessoas e", "\")) x.append(nota) media += nota media = media / 10 maiorNota = max(x)", "salario.append(float(input(\"Qual o seu salário? R$\"))) if genero[i] == \"F\" and salario[i] > 5000:", "fazem parte do curso de Introdução a Algoritmos, ministrado pelo prof. <NAME> e", "index conte a partir da posição seguinte if i == maiorIdade: place =", "idade:\") print(tabulate(table)) ''' 85) Faça um algoritmo que leia o nome, o sexo", "No final, mostre uma listagem contendo apenas os dados das funcionárias mulheres que", "cadastradas b) Em quais posições temos pessoas com mais de 25 anos c)", "os valores ordenados. ''' print(\"\\nQuestão 83\\n\") import random vetor = [] for i", "anos c) Qual foi a maior idade digitada (podem haver repetições) d) Em", "= [] for i in range(8): x.append(int(input(\"Digite sua idade: \"))) print(\"\\nIdades inseridas:\", x)", "mostrando no final os valores ordenados. ''' print(\"\\nQuestão 83\\n\") import random vetor =", "pessoas com mais de 25 anos c) Qual foi a maior idade digitada", "um vetor. No final, mostre: a) Qual é a média da turma b)", "print(\"\\nQuestão 85\\n\") nome = [] genero = [] salario = [] table =", "Tente de novo!\") choice = input(\"Qual o seu gênero? [F/M] \") return choice", "https://stackoverflow.com/questions/8356501/python-format-tabular-output from tabulate import tabulate nomes = [] idades = [] table =", "o seu gênero? 
[F/M] \") resposta = test(resposta) genero.append(resposta) salario.append(float(input(\"Qual o seu salário?", "= 0 position = [] position25 = [] for i in range(8): x.append(int(input(\"Digite", "da turma b) Quantos alunos estão acima da média da turma c) Qual", "tabulate nomes = [] idades = [] table = [] for i in", "x.index(i, k) + 1 position.append(place) k = place print(\"\\nTodas as notas:\", x) print(\"Média", "ministrado pelo prof. <NAME> e podem ser encontrados no site https://www.cursoemvideo.com/wp-content/uploads/2019/08/exercicios-algoritmos.pdf 81) Crie", "= [] table = [] for i in range(9): nomes.append(input(\"Digite o seu nome:", "\") return choice print(\"\\nQuestão 85\\n\") nome = [] genero = [] salario =", "print(\"Média da turma:\", round(media, 2)) print(\"Qtd de alunos acima da média:\", excel) print(\"Maior", "Em que posições digitamos a maior idade ''' print(\"Questão 81\\n\") x = []", "> 25: value = x.index(i, j) + 1 # posição de valores >", "ordem crescente, mostrando no final os valores ordenados. ''' print(\"\\nQuestão 83\\n\") import random", "x.index(i, j) + 1 # posição de valores > 25 position25.append(value) # add", "nome = [] genero = [] salario = [] table = [] for", "media: excel += 1 if i == maiorNota: place = x.index(i, k) +", "site https://www.cursoemvideo.com/wp-content/uploads/2019/08/exercicios-algoritmos.pdf 81) Crie um programa que leia a idade de 8 pessoas", "2)) print(\"Qtd de alunos acima da média:\", excel) print(\"Maior nota:\", maiorNota) print(\"Posições em", "gênero? [F/M] \") resposta = test(resposta) genero.append(resposta) salario.append(float(input(\"Qual o seu salário? R$\"))) if", "que leia a nota de 10 alunos de uma turma e guarde-as em", "[] media = 0 position = [] position25 = [] for i in", "if i == maiorNota: place = x.index(i, k) + 1 position.append(place) k =", "parte do curso de Introdução a Algoritmos, ministrado pelo prof. <NAME> e podem", "o seu salário? R$\"))) if genero[i] == \"F\" and salario[i] > 5000: table.append([nome[i],", "x.append(nota) media += nota media = media / 10 maiorNota = max(x) excel", "from tabulate import tabulate nomes = [] idades = [] table = []", "uma lógica que preencha um vetor de 20 posições com números aleatórios (entre", "menores de idade:\") print(tabulate(table)) ''' 85) Faça um algoritmo que leia o nome,", "que a função index conte a partir da posição seguinte if i ==", "[F/M] \") return choice print(\"\\nQuestão 85\\n\") nome = [] genero = [] salario", "choice == \"F\" or choice == \"M\": break else: print(\"Você precisa escolher F", "import random vetor = [] for i in range(20): vetor.append(random.randint(0, 99)) print(\"Números gerados:\",", "82\\n\") x = [] media = 0 for i in range(10): nota =", "final, mostre uma listagem contendo apenas os dados das pessoas menores de idade.", "= 0 k = 0 position = [] for i in x: if", "depois coloque o vetor em ordem crescente, mostrando no final os valores ordenados.", "Crie um programa que leia o nome e a idade de 9 pessoas", "Testando se o usuário digitou a letra correta def test(choice): while True: if", "turma b) Quantos alunos estão acima da média da turma c) Qual foi", "posições com números aleatórios (entre 0 e 99) gerados pelo computador. 
Logo em", "add posição na lista j = value # alterando valor de j para", "um vetor de 20 posições com números aleatórios (entre 0 e 99) gerados", "1 if i == maiorNota: place = x.index(i, k) + 1 position.append(place) k", "que leia o nome, o sexo e o salário de 5 funcionários e", "= media / 10 maiorNota = max(x) excel = 0 k = 0", "idade das pessoas cadastradas b) Em quais posições temos pessoas com mais de", "mostre uma listagem contendo apenas os dados das funcionárias mulheres que ganham mais", "que ganham mais de R$5 mil. ''' # Testando se o usuário digitou", "k) + 1 position.append(place) k = place print(\"\\nTodas as notas:\", x) print(\"Média da", "de alunos acima da média:\", excel) print(\"Maior nota:\", maiorNota) print(\"Posições em que a", "de j para que a função index conte a partir da posição seguinte", "vetores. No final, mostre uma listagem contendo apenas os dados das funcionárias mulheres", "maior idade ''' print(\"Questão 81\\n\") x = [] media = 0 position =", "print(\"Posições com idades acima de 25 anos:\", position25) print(\"Maior idade digitada:\", maiorIdade) print(\"Posições", "83\\n\") import random vetor = [] for i in range(20): vetor.append(random.randint(0, 99)) print(\"Números", "= max(x) excel = 0 k = 0 position = [] for i", "+ 1 position.append(place) k = place print(\"\\nTodas as notas:\", x) print(\"Média da turma:\",", "0 e 99) gerados pelo computador. Logo em seguida, mostre os números gerados", "a letra correta def test(choice): while True: if choice == \"F\" or choice", "digitada (podem haver repetições) d) Em que posições digitamos a maior idade '''", "range(20): vetor.append(random.randint(0, 99)) print(\"Números gerados:\", vetor) print(\"Números ordenados:\", sorted(vetor)) ''' 84) Crie um", "== maiorIdade: place = x.index(i, k) + 1 position.append(place) k = place media", "<NAME> e podem ser encontrados no site https://www.cursoemvideo.com/wp-content/uploads/2019/08/exercicios-algoritmos.pdf 81) Crie um programa que", "> 5000: table.append([nome[i], genero[i], \"R$\" + str(round(salario[i], 2))]) if table != []: print(\"\\nNome", "nota:\", maiorNota) print(\"Posições em que a maior nota aparece:\", position) ''' 83) [DESAFIO]", "k = place print(\"\\nTodas as notas:\", x) print(\"Média da turma:\", round(media, 2)) print(\"Qtd", "= 0 k = 0 for i in x: media += i if", "uma turma e guarde-as em um vetor. No final, mostre: a) Qual é", "mulheres que ganham mais de R$5 mil. ''' # Testando se o usuário", "idades[i]]) if table != []: print(\"\\nPessoas menores de idade:\") print(tabulate(table)) ''' 85) Faça", "No final, mostre: a) Qual é a média de idade das pessoas cadastradas", "mostre os números gerados e depois coloque o vetor em ordem crescente, mostrando", "três vetores. No final, mostre uma listagem contendo apenas os dados das funcionárias", "seguinte if i == maiorIdade: place = x.index(i, k) + 1 position.append(place) k", "25 anos:\", position25) print(\"Maior idade digitada:\", maiorIdade) print(\"Posições com a maior idade:\", position)", "84\\n\") # https://stackoverflow.com/questions/8356501/python-format-tabular-output from tabulate import tabulate nomes = [] idades = []", "for i in x: media += i if i > 25: value =", "idade ''' print(\"Questão 81\\n\") x = [] media = 0 position = []", "print(\"Posições com a maior idade:\", position) ''' 82) Faça um algoritmo que leia", "for i in range(10): nota = float(input(\"Qual a nota do aluno? \")) x.append(nota)", "mostre uma listagem contendo apenas os dados das pessoas menores de idade. 
'''", "pessoas cadastradas b) Em quais posições temos pessoas com mais de 25 anos", "o seu nome: \")) idades.append(int(input(\"Digite a sua idade: \"))) if idades[i] < 18:", "idades cadastradas:\", media) print(\"Posições com idades acima de 25 anos:\", position25) print(\"Maior idade", "media = 0 for i in range(10): nota = float(input(\"Qual a nota do", "= [] for i in range(5): nome.append(input(\"Digite o seu nome: \")) resposta =", "de 9 pessoas e guarde esses valores em dois vetores, em posições relacionadas.", "test(choice): while True: if choice == \"F\" or choice == \"M\": break else:", "vetores, em posições relacionadas. No final, mostre uma listagem contendo apenas os dados", "média:\", excel) print(\"Maior nota:\", maiorNota) print(\"Posições em que a maior nota aparece:\", position)", "números aleatórios (entre 0 e 99) gerados pelo computador. Logo em seguida, mostre", "''' print(\"\\nQuestão 83\\n\") import random vetor = [] for i in range(20): vetor.append(random.randint(0,", "print(\"\\nQuestão 83\\n\") import random vetor = [] for i in range(20): vetor.append(random.randint(0, 99))", "de 25 anos c) Qual foi a maior idade digitada (podem haver repetições)", "8 pessoas e guarde-as em um vetor. No final, mostre: a) Qual é", "inseridas:\", x) maiorIdade = max(x) j = 0 k = 0 for i", "o seu gênero? [F/M] \") return choice print(\"\\nQuestão 85\\n\") nome = [] genero", "1 position.append(place) k = place media = media / 8 print(\"Média das idades", "da média:\", excel) print(\"Maior nota:\", maiorNota) print(\"Posições em que a maior nota aparece:\",", "pessoas e guarde-as em um vetor. No final, mostre: a) Qual é a", "print(\"Média das idades cadastradas:\", media) print(\"Posições com idades acima de 25 anos:\", position25)", "a função index conte a partir da posição seguinte if i == maiorIdade:", "valores ordenados. ''' print(\"\\nQuestão 83\\n\") import random vetor = [] for i in", "+= i if i > 25: value = x.index(i, j) + 1 #", "acima da média da turma c) Qual foi a maior nota digitada d)", "gerados pelo computador. Logo em seguida, mostre os números gerados e depois coloque", "i in range(5): nome.append(input(\"Digite o seu nome: \")) resposta = input(\"Qual o seu", "position) ''' 82) Faça um algoritmo que leia a nota de 10 alunos", "para Feminino ou M para Masculino. Tente de novo!\") choice = input(\"Qual o", "= [] table = [] for i in range(5): nome.append(input(\"Digite o seu nome:", "maiorNota) print(\"Posições em que a maior nota aparece:\", position) ''' 83) [DESAFIO] Crie", "for i in range(8): x.append(int(input(\"Digite sua idade: \"))) print(\"\\nIdades inseridas:\", x) maiorIdade =", "print(\"\\nIdades inseridas:\", x) maiorIdade = max(x) j = 0 k = 0 for", "== maiorNota: place = x.index(i, k) + 1 position.append(place) k = place print(\"\\nTodas", "um programa que leia a idade de 8 pessoas e guarde-as em um", "a idade de 9 pessoas e guarde esses valores em dois vetores, em", "[] salario = [] table = [] for i in range(5): nome.append(input(\"Digite o", "R$5 mil. ''' # Testando se o usuário digitou a letra correta def", "de 10 alunos de uma turma e guarde-as em um vetor. 
No final,", "posições digitamos a maior idade ''' print(\"Questão 81\\n\") x = [] media =", "https://www.cursoemvideo.com/wp-content/uploads/2019/08/exercicios-algoritmos.pdf 81) Crie um programa que leia a idade de 8 pessoas e", "nome e a idade de 9 pessoas e guarde esses valores em dois", "\"M\": break else: print(\"Você precisa escolher F para Feminino ou M para Masculino.", "= [] for i in range(9): nomes.append(input(\"Digite o seu nome: \")) idades.append(int(input(\"Digite a", "i in range(9): nomes.append(input(\"Digite o seu nome: \")) idades.append(int(input(\"Digite a sua idade: \")))", "esses dados em três vetores. No final, mostre uma listagem contendo apenas os", "x.append(int(input(\"Digite sua idade: \"))) print(\"\\nIdades inseridas:\", x) maiorIdade = max(x) j = 0", "em dois vetores, em posições relacionadas. No final, mostre uma listagem contendo apenas", "salário de 5 funcionários e guarde esses dados em três vetores. No final,", "uma listagem contendo apenas os dados das funcionárias mulheres que ganham mais de", "nota do aluno? \")) x.append(nota) media += nota media = media / 10", "para Masculino. Tente de novo!\") choice = input(\"Qual o seu gênero? [F/M] \")", "a Algoritmos, ministrado pelo prof. <NAME> e podem ser encontrados no site https://www.cursoemvideo.com/wp-content/uploads/2019/08/exercicios-algoritmos.pdf", "i in range(10): nota = float(input(\"Qual a nota do aluno? \")) x.append(nota) media", "o usuário digitou a letra correta def test(choice): while True: if choice ==", "final, mostre uma listagem contendo apenas os dados das funcionárias mulheres que ganham", "idade digitada (podem haver repetições) d) Em que posições digitamos a maior idade", "input(\"Qual o seu gênero? [F/M] \") resposta = test(resposta) genero.append(resposta) salario.append(float(input(\"Qual o seu", "81) Crie um programa que leia a idade de 8 pessoas e guarde-as", "Crie um programa que leia a idade de 8 pessoas e guarde-as em", "= 0 position = [] for i in x: if i > media:", "0 k = 0 for i in x: media += i if i", "Quantos alunos estão acima da média da turma c) Qual foi a maior", "do aluno? \")) x.append(nota) media += nota media = media / 10 maiorNota", "prof. <NAME> e podem ser encontrados no site https://www.cursoemvideo.com/wp-content/uploads/2019/08/exercicios-algoritmos.pdf 81) Crie um programa", "cadastradas:\", media) print(\"Posições com idades acima de 25 anos:\", position25) print(\"Maior idade digitada:\",", "com idades acima de 25 anos:\", position25) print(\"Maior idade digitada:\", maiorIdade) print(\"Posições com", "x = [] media = 0 for i in range(10): nota = float(input(\"Qual", "True: if choice == \"F\" or choice == \"M\": break else: print(\"Você precisa", "posição seguinte if i == maiorIdade: place = x.index(i, k) + 1 position.append(place)", "das pessoas menores de idade. ''' print(\"\\nQuestão 84\\n\") # https://stackoverflow.com/questions/8356501/python-format-tabular-output from tabulate import", "de idade. ''' print(\"\\nQuestão 84\\n\") # https://stackoverflow.com/questions/8356501/python-format-tabular-output from tabulate import tabulate nomes =", "alterando valor de j para que a função index conte a partir da", "números gerados e depois coloque o vetor em ordem crescente, mostrando no final", "de uma turma e guarde-as em um vetor. 
No final, mostre: a) Qual", "in x: if i > media: excel += 1 if i == maiorNota:", "idade digitada:\", maiorIdade) print(\"Posições com a maior idade:\", position) ''' 82) Faça um", "''' print(\"Questão 81\\n\") x = [] media = 0 position = [] position25", "j = value # alterando valor de j para que a função index", "vetor) print(\"Números ordenados:\", sorted(vetor)) ''' 84) Crie um programa que leia o nome", "em que a maior nota aparece:\", position) ''' 83) [DESAFIO] Crie uma lógica", "aparece:\", position) ''' 83) [DESAFIO] Crie uma lógica que preencha um vetor de", "choice print(\"\\nQuestão 85\\n\") nome = [] genero = [] salario = [] table", "table = [] for i in range(5): nome.append(input(\"Digite o seu nome: \")) resposta", "x: if i > media: excel += 1 if i == maiorNota: place", "final, mostre: a) Qual é a média da turma b) Quantos alunos estão", "os dados das pessoas menores de idade. ''' print(\"\\nQuestão 84\\n\") # https://stackoverflow.com/questions/8356501/python-format-tabular-output from", "é a média da turma b) Quantos alunos estão acima da média da", "''' 85) Faça um algoritmo que leia o nome, o sexo e o", "pessoas menores de idade. ''' print(\"\\nQuestão 84\\n\") # https://stackoverflow.com/questions/8356501/python-format-tabular-output from tabulate import tabulate", "idade: \"))) if idades[i] < 18: table.append([nomes[i], idades[i]]) if table != []: print(\"\\nPessoas", "e o salário de 5 funcionários e guarde esses dados em três vetores.", "== \"F\" and salario[i] > 5000: table.append([nome[i], genero[i], \"R$\" + str(round(salario[i], 2))]) if", "Qual foi a maior idade digitada (podem haver repetições) d) Em que posições", "value # alterando valor de j para que a função index conte a", "= 0 for i in range(10): nota = float(input(\"Qual a nota do aluno?", "nomes.append(input(\"Digite o seu nome: \")) idades.append(int(input(\"Digite a sua idade: \"))) if idades[i] <", "test(resposta) genero.append(resposta) salario.append(float(input(\"Qual o seu salário? R$\"))) if genero[i] == \"F\" and salario[i]", "e podem ser encontrados no site https://www.cursoemvideo.com/wp-content/uploads/2019/08/exercicios-algoritmos.pdf 81) Crie um programa que leia", "da posição seguinte if i == maiorIdade: place = x.index(i, k) + 1", "break else: print(\"Você precisa escolher F para Feminino ou M para Masculino. Tente", "idade: \"))) print(\"\\nIdades inseridas:\", x) maiorIdade = max(x) j = 0 k =", "k = place media = media / 8 print(\"Média das idades cadastradas:\", media)", "leia a nota de 10 alunos de uma turma e guarde-as em um", "guarde-as em um vetor. No final, mostre: a) Qual é a média da", "9 pessoas e guarde esses valores em dois vetores, em posições relacionadas. No", "+ 1 position.append(place) k = place media = media / 8 print(\"Média das", "position25.append(value) # add posição na lista j = value # alterando valor de", "# alterando valor de j para que a função index conte a partir", "i in range(8): x.append(int(input(\"Digite sua idade: \"))) print(\"\\nIdades inseridas:\", x) maiorIdade = max(x)", "print(\"\\nTodas as notas:\", x) print(\"Média da turma:\", round(media, 2)) print(\"Qtd de alunos acima", "valor de j para que a função index conte a partir da posição", "o seu nome: \")) resposta = input(\"Qual o seu gênero? 
[F/M] \") resposta", "i if i > 25: value = x.index(i, j) + 1 # posição", "b) Quantos alunos estão acima da média da turma c) Qual foi a", "in range(8): x.append(int(input(\"Digite sua idade: \"))) print(\"\\nIdades inseridas:\", x) maiorIdade = max(x) j", "as notas:\", x) print(\"Média da turma:\", round(media, 2)) print(\"Qtd de alunos acima da", "print(\"Maior idade digitada:\", maiorIdade) print(\"Posições com a maior idade:\", position) ''' 82) Faça", "position = [] for i in x: if i > media: excel +=", "nomes = [] idades = [] table = [] for i in range(9):", "de idade:\") print(tabulate(table)) ''' 85) Faça um algoritmo que leia o nome, o", "coloque o vetor em ordem crescente, mostrando no final os valores ordenados. '''", "posição de valores > 25 position25.append(value) # add posição na lista j =", "+= 1 if i == maiorNota: place = x.index(i, k) + 1 position.append(place)", "escolher F para Feminino ou M para Masculino. Tente de novo!\") choice =", "= [] for i in range(20): vetor.append(random.randint(0, 99)) print(\"Números gerados:\", vetor) print(\"Números ordenados:\",", "de 25 anos:\", position25) print(\"Maior idade digitada:\", maiorIdade) print(\"Posições com a maior idade:\",", "[] position25 = [] for i in range(8): x.append(int(input(\"Digite sua idade: \"))) print(\"\\nIdades", "i in x: media += i if i > 25: value = x.index(i,", "F para Feminino ou M para Masculino. Tente de novo!\") choice = input(\"Qual", "de 5 funcionários e guarde esses dados em três vetores. No final, mostre", "(podem haver repetições) d) Em que posições digitamos a maior idade ''' print(\"Questão", "na lista j = value # alterando valor de j para que a", "turma:\", round(media, 2)) print(\"Qtd de alunos acima da média:\", excel) print(\"Maior nota:\", maiorNota)", "das pessoas cadastradas b) Em quais posições temos pessoas com mais de 25", "maiorNota: place = x.index(i, k) + 1 position.append(place) k = place print(\"\\nTodas as", "== \"F\" or choice == \"M\": break else: print(\"Você precisa escolher F para", "maiorIdade = max(x) j = 0 k = 0 for i in x:", "\"F\" or choice == \"M\": break else: print(\"Você precisa escolher F para Feminino", "quais posições temos pessoas com mais de 25 anos c) Qual foi a", "media = 0 position = [] position25 = [] for i in range(8):", "posições temos pessoas com mais de 25 anos c) Qual foi a maior", "do curso de Introdução a Algoritmos, ministrado pelo prof. <NAME> e podem ser", "a nota de 10 alunos de uma turma e guarde-as em um vetor.", "i > media: excel += 1 if i == maiorNota: place = x.index(i,", "print(\"Qtd de alunos acima da média:\", excel) print(\"Maior nota:\", maiorNota) print(\"Posições em que", "guarde esses valores em dois vetores, em posições relacionadas. No final, mostre uma", "foi a maior idade digitada (podem haver repetições) d) Em que posições digitamos", "relacionadas. No final, mostre uma listagem contendo apenas os dados das pessoas menores", "mostre: a) Qual é a média de idade das pessoas cadastradas b) Em", "print(\"Posições em que a maior nota aparece:\", position) ''' 83) [DESAFIO] Crie uma", "lógica que preencha um vetor de 20 posições com números aleatórios (entre 0", "programa que leia o nome e a idade de 9 pessoas e guarde", "if i > media: excel += 1 if i == maiorNota: place =", "média da turma c) Qual foi a maior nota digitada d) Em que", "nome: \")) resposta = input(\"Qual o seu gênero? [F/M] \") resposta = test(resposta)", "choice == \"M\": break else: print(\"Você precisa escolher F para Feminino ou M", "vetor. 
No final, mostre: a) Qual é a média de idade das pessoas", "media += nota media = media / 10 maiorNota = max(x) excel =", "0 for i in x: media += i if i > 25: value", "seu nome: \")) resposta = input(\"Qual o seu gênero? [F/M] \") resposta =", "repetições) d) Em que posições digitamos a maior idade ''' print(\"Questão 81\\n\") x", "[] table = [] for i in range(9): nomes.append(input(\"Digite o seu nome: \"))", "pessoas e guarde esses valores em dois vetores, em posições relacionadas. No final,", "print(\"\\nQuestão 84\\n\") # https://stackoverflow.com/questions/8356501/python-format-tabular-output from tabulate import tabulate nomes = [] idades =", "de 8 pessoas e guarde-as em um vetor. No final, mostre: a) Qual", "notas:\", x) print(\"Média da turma:\", round(media, 2)) print(\"Qtd de alunos acima da média:\",", "idade:\", position) ''' 82) Faça um algoritmo que leia a nota de 10", "/ 8 print(\"Média das idades cadastradas:\", media) print(\"Posições com idades acima de 25", "com mais de 25 anos c) Qual foi a maior idade digitada (podem", "vetor em ordem crescente, mostrando no final os valores ordenados. ''' print(\"\\nQuestão 83\\n\")", "os dados das funcionárias mulheres que ganham mais de R$5 mil. ''' #", "haver repetições) d) Em que posições digitamos a maior idade ''' print(\"Questão 81\\n\")", "em três vetores. No final, mostre uma listagem contendo apenas os dados das", "ordenados:\", sorted(vetor)) ''' 84) Crie um programa que leia o nome e a", "a média da turma b) Quantos alunos estão acima da média da turma", "c) Qual foi a maior nota digitada d) Em que posições a maior", "M para Masculino. Tente de novo!\") choice = input(\"Qual o seu gênero? [F/M]", "No final, mostre uma listagem contendo apenas os dados das pessoas menores de", "das funcionárias mulheres que ganham mais de R$5 mil. ''' # Testando se", "while True: if choice == \"F\" or choice == \"M\": break else: print(\"Você", "d) Em que posições digitamos a maior idade ''' print(\"Questão 81\\n\") x =", "position.append(place) k = place print(\"\\nTodas as notas:\", x) print(\"Média da turma:\", round(media, 2))", "dois vetores, em posições relacionadas. No final, mostre uma listagem contendo apenas os", "# https://stackoverflow.com/questions/8356501/python-format-tabular-output from tabulate import tabulate nomes = [] idades = [] table", "mil. 
''' # Testando se o usuário digitou a letra correta def test(choice):", "podem ser encontrados no site https://www.cursoemvideo.com/wp-content/uploads/2019/08/exercicios-algoritmos.pdf 81) Crie um programa que leia a", "# Testando se o usuário digitou a letra correta def test(choice): while True:", "Estes exercícios fazem parte do curso de Introdução a Algoritmos, ministrado pelo prof.", "mais de 25 anos c) Qual foi a maior idade digitada (podem haver", "nota aparece:\", position) ''' 83) [DESAFIO] Crie uma lógica que preencha um vetor", "25 anos c) Qual foi a maior idade digitada (podem haver repetições) d)", "digitada:\", maiorIdade) print(\"Posições com a maior idade:\", position) ''' 82) Faça um algoritmo", "table.append([nomes[i], idades[i]]) if table != []: print(\"\\nPessoas menores de idade:\") print(tabulate(table)) ''' 85)", "print(\"\\nPessoas menores de idade:\") print(tabulate(table)) ''' 85) Faça um algoritmo que leia o", "maiorIdade: place = x.index(i, k) + 1 position.append(place) k = place media =", "85) Faça um algoritmo que leia o nome, o sexo e o salário", "com a maior idade:\", position) ''' 82) Faça um algoritmo que leia a", "vetor.append(random.randint(0, 99)) print(\"Números gerados:\", vetor) print(\"Números ordenados:\", sorted(vetor)) ''' 84) Crie um programa", "apenas os dados das pessoas menores de idade. ''' print(\"\\nQuestão 84\\n\") # https://stackoverflow.com/questions/8356501/python-format-tabular-output", "def test(choice): while True: if choice == \"F\" or choice == \"M\": break", "posições relacionadas. No final, mostre uma listagem contendo apenas os dados das pessoas", "salario = [] table = [] for i in range(5): nome.append(input(\"Digite o seu", "dados das pessoas menores de idade. ''' print(\"\\nQuestão 84\\n\") # https://stackoverflow.com/questions/8356501/python-format-tabular-output from tabulate", "Em que posições a maior nota aparece ''' print(\"\\nQuestão 82\\n\") x = []", "''' 82) Faça um algoritmo que leia a nota de 10 alunos de", "media) print(\"Posições com idades acima de 25 anos:\", position25) print(\"Maior idade digitada:\", maiorIdade)", "final, mostre: a) Qual é a média de idade das pessoas cadastradas b)", "i in x: if i > media: excel += 1 if i ==", "leia o nome, o sexo e o salário de 5 funcionários e guarde", "da turma c) Qual foi a maior nota digitada d) Em que posições", "seguida, mostre os números gerados e depois coloque o vetor em ordem crescente,", "nome: \")) idades.append(int(input(\"Digite a sua idade: \"))) if idades[i] < 18: table.append([nomes[i], idades[i]])", "if table != []: print(\"\\nPessoas menores de idade:\") print(tabulate(table)) ''' 85) Faça um", "input(\"Qual o seu gênero? [F/M] \") return choice print(\"\\nQuestão 85\\n\") nome = []", "if genero[i] == \"F\" and salario[i] > 5000: table.append([nome[i], genero[i], \"R$\" + str(round(salario[i],", "genero[i], \"R$\" + str(round(salario[i], 2))]) if table != []: print(\"\\nNome | Gênero |", "x: media += i if i > 25: value = x.index(i, j) +", "media += i if i > 25: value = x.index(i, j) + 1", "= input(\"Qual o seu gênero? 
[F/M] \") resposta = test(resposta) genero.append(resposta) salario.append(float(input(\"Qual o", "= [] media = 0 position = [] position25 = [] for i", "nome, o sexo e o salário de 5 funcionários e guarde esses dados", "nota aparece ''' print(\"\\nQuestão 82\\n\") x = [] media = 0 for i", "8 print(\"Média das idades cadastradas:\", media) print(\"Posições com idades acima de 25 anos:\",", "leia o nome e a idade de 9 pessoas e guarde esses valores", "Qual foi a maior nota digitada d) Em que posições a maior nota", "Faça um algoritmo que leia a nota de 10 alunos de uma turma", "Logo em seguida, mostre os números gerados e depois coloque o vetor em", "value = x.index(i, j) + 1 # posição de valores > 25 position25.append(value)", "em posições relacionadas. No final, mostre uma listagem contendo apenas os dados das", "excel) print(\"Maior nota:\", maiorNota) print(\"Posições em que a maior nota aparece:\", position) '''", "[] genero = [] salario = [] table = [] for i in", "mais de R$5 mil. ''' # Testando se o usuário digitou a letra", "a sua idade: \"))) if idades[i] < 18: table.append([nomes[i], idades[i]]) if table !=", "genero = [] salario = [] table = [] for i in range(5):", "place print(\"\\nTodas as notas:\", x) print(\"Média da turma:\", round(media, 2)) print(\"Qtd de alunos", "Qual é a média da turma b) Quantos alunos estão acima da média", "pelo computador. Logo em seguida, mostre os números gerados e depois coloque o", "seu gênero? [F/M] \") resposta = test(resposta) genero.append(resposta) salario.append(float(input(\"Qual o seu salário? R$\")))", "[] for i in range(20): vetor.append(random.randint(0, 99)) print(\"Números gerados:\", vetor) print(\"Números ordenados:\", sorted(vetor))", "idades = [] table = [] for i in range(9): nomes.append(input(\"Digite o seu" ]
[ "/<locale>/<app>/file/<id>/type:attachment/filename.xpi # See comment in File.get_url_path(): do not change this without checking #", "r'^file/(?P<file_id>\\d+)/' r'(?:type:(?P<download_type>\\w+)/)?' r'(?:(?P<filename>[\\w+.-]*))?$' ), views.download_file, name='downloads.file', ), re_path( r'^source/(?P<version_id>\\d+)', views.download_source, name='downloads.source' ), #", "re_path from olympia.addons.urls import ADDON_ID from olympia.amo.views import frontend_view from . import views", "re_path(r'^$', frontend_view, name='addons.versions'), re_path( r'^(?P<version_num>[^/]+)/updateinfo/$', views.update_info, name='addons.versions.update_info', ), ] download_patterns = [ #", "views urlpatterns = [ re_path(r'^$', frontend_view, name='addons.versions'), re_path( r'^(?P<version_num>[^/]+)/updateinfo/$', views.update_info, name='addons.versions.update_info', ), ]", "olympia.addons.urls import ADDON_ID from olympia.amo.views import frontend_view from . import views urlpatterns =", "in their code. re_path( ( r'^file/(?P<file_id>\\d+)/' r'(?:type:(?P<download_type>\\w+)/)?' r'(?:(?P<filename>[\\w+.-]*))?$' ), views.download_file, name='downloads.file', ), re_path(", "from olympia.amo.views import frontend_view from . import views urlpatterns = [ re_path(r'^$', frontend_view,", "/<locale>/<app>/file/<id>/filename.xpi # /<locale>/<app>/file/<id>/type:attachment/filename.xpi # See comment in File.get_url_path(): do not change this without", "# is ignored though. re_path( ( r'^latest/%s/' r'(?:type:(?P<download_type>\\w+)/)?' r'(?:platform:(?P<platform>\\d+)/)?' r'(?:(?P<filename>[\\w+.-]*))?$' ) % ADDON_ID,", "= [ re_path(r'^$', frontend_view, name='addons.versions'), re_path( r'^(?P<version_num>[^/]+)/updateinfo/$', views.update_info, name='addons.versions.update_info', ), ] download_patterns =", "r'^source/(?P<version_id>\\d+)', views.download_source, name='downloads.source' ), # /latest/<id>/type:xpi/platform:5/lol.xpi - everything after the addon id #", "in File.get_url_path(): do not change this without checking # with Fenix first, the", "id # is ignored though. re_path( ( r'^latest/%s/' r'(?:type:(?P<download_type>\\w+)/)?' r'(?:platform:(?P<platform>\\d+)/)?' r'(?:(?P<filename>[\\w+.-]*))?$' ) %", "change this without checking # with Fenix first, the pattern is hardcoded in", "this without checking # with Fenix first, the pattern is hardcoded in their", "), re_path( r'^source/(?P<version_id>\\d+)', views.download_source, name='downloads.source' ), # /latest/<id>/type:xpi/platform:5/lol.xpi - everything after the addon", "from olympia.addons.urls import ADDON_ID from olympia.amo.views import frontend_view from . import views urlpatterns", "( r'^file/(?P<file_id>\\d+)/' r'(?:type:(?P<download_type>\\w+)/)?' r'(?:(?P<filename>[\\w+.-]*))?$' ), views.download_file, name='downloads.file', ), re_path( r'^source/(?P<version_id>\\d+)', views.download_source, name='downloads.source' ),", "name='addons.versions.update_info', ), ] download_patterns = [ # /<locale>/<app>/file/<id>/filename.xpi # /<locale>/<app>/file/<id>/type:attachment/filename.xpi # See comment", "though. re_path( ( r'^latest/%s/' r'(?:type:(?P<download_type>\\w+)/)?' r'(?:platform:(?P<platform>\\d+)/)?' r'(?:(?P<filename>[\\w+.-]*))?$' ) % ADDON_ID, views.download_latest, name='downloads.latest', ),", "olympia.amo.views import frontend_view from . 
import views urlpatterns = [ re_path(r'^$', frontend_view, name='addons.versions'),", "[ # /<locale>/<app>/file/<id>/filename.xpi # /<locale>/<app>/file/<id>/type:attachment/filename.xpi # See comment in File.get_url_path(): do not change", "Fenix first, the pattern is hardcoded in their code. re_path( ( r'^file/(?P<file_id>\\d+)/' r'(?:type:(?P<download_type>\\w+)/)?'", "r'^(?P<version_num>[^/]+)/updateinfo/$', views.update_info, name='addons.versions.update_info', ), ] download_patterns = [ # /<locale>/<app>/file/<id>/filename.xpi # /<locale>/<app>/file/<id>/type:attachment/filename.xpi #", "] download_patterns = [ # /<locale>/<app>/file/<id>/filename.xpi # /<locale>/<app>/file/<id>/type:attachment/filename.xpi # See comment in File.get_url_path():", "comment in File.get_url_path(): do not change this without checking # with Fenix first,", "addon id # is ignored though. re_path( ( r'^latest/%s/' r'(?:type:(?P<download_type>\\w+)/)?' r'(?:platform:(?P<platform>\\d+)/)?' r'(?:(?P<filename>[\\w+.-]*))?$' )", "), views.download_file, name='downloads.file', ), re_path( r'^source/(?P<version_id>\\d+)', views.download_source, name='downloads.source' ), # /latest/<id>/type:xpi/platform:5/lol.xpi - everything", "first, the pattern is hardcoded in their code. re_path( ( r'^file/(?P<file_id>\\d+)/' r'(?:type:(?P<download_type>\\w+)/)?' r'(?:(?P<filename>[\\w+.-]*))?$'", "not change this without checking # with Fenix first, the pattern is hardcoded", "pattern is hardcoded in their code. re_path( ( r'^file/(?P<file_id>\\d+)/' r'(?:type:(?P<download_type>\\w+)/)?' r'(?:(?P<filename>[\\w+.-]*))?$' ), views.download_file,", "- everything after the addon id # is ignored though. re_path( ( r'^latest/%s/'", "from django.urls import re_path from olympia.addons.urls import ADDON_ID from olympia.amo.views import frontend_view from", "name='addons.versions'), re_path( r'^(?P<version_num>[^/]+)/updateinfo/$', views.update_info, name='addons.versions.update_info', ), ] download_patterns = [ # /<locale>/<app>/file/<id>/filename.xpi #", "re_path( ( r'^latest/%s/' r'(?:type:(?P<download_type>\\w+)/)?' r'(?:platform:(?P<platform>\\d+)/)?' r'(?:(?P<filename>[\\w+.-]*))?$' ) % ADDON_ID, views.download_latest, name='downloads.latest', ), ]", "# /<locale>/<app>/file/<id>/filename.xpi # /<locale>/<app>/file/<id>/type:attachment/filename.xpi # See comment in File.get_url_path(): do not change this", "r'(?:type:(?P<download_type>\\w+)/)?' r'(?:(?P<filename>[\\w+.-]*))?$' ), views.download_file, name='downloads.file', ), re_path( r'^source/(?P<version_id>\\d+)', views.download_source, name='downloads.source' ), # /latest/<id>/type:xpi/platform:5/lol.xpi", "See comment in File.get_url_path(): do not change this without checking # with Fenix", "download_patterns = [ # /<locale>/<app>/file/<id>/filename.xpi # /<locale>/<app>/file/<id>/type:attachment/filename.xpi # See comment in File.get_url_path(): do", "with Fenix first, the pattern is hardcoded in their code. re_path( ( r'^file/(?P<file_id>\\d+)/'", "views.update_info, name='addons.versions.update_info', ), ] download_patterns = [ # /<locale>/<app>/file/<id>/filename.xpi # /<locale>/<app>/file/<id>/type:attachment/filename.xpi # See", "django.urls import re_path from olympia.addons.urls import ADDON_ID from olympia.amo.views import frontend_view from .", "from . import views urlpatterns = [ re_path(r'^$', frontend_view, name='addons.versions'), re_path( r'^(?P<version_num>[^/]+)/updateinfo/$', views.update_info,", "code. 
re_path( ( r'^file/(?P<file_id>\\d+)/' r'(?:type:(?P<download_type>\\w+)/)?' r'(?:(?P<filename>[\\w+.-]*))?$' ), views.download_file, name='downloads.file', ), re_path( r'^source/(?P<version_id>\\d+)', views.download_source,", "ignored though. re_path( ( r'^latest/%s/' r'(?:type:(?P<download_type>\\w+)/)?' r'(?:platform:(?P<platform>\\d+)/)?' r'(?:(?P<filename>[\\w+.-]*))?$' ) % ADDON_ID, views.download_latest, name='downloads.latest',", "do not change this without checking # with Fenix first, the pattern is", "urlpatterns = [ re_path(r'^$', frontend_view, name='addons.versions'), re_path( r'^(?P<version_num>[^/]+)/updateinfo/$', views.update_info, name='addons.versions.update_info', ), ] download_patterns", "# /latest/<id>/type:xpi/platform:5/lol.xpi - everything after the addon id # is ignored though. re_path(", "File.get_url_path(): do not change this without checking # with Fenix first, the pattern", "# with Fenix first, the pattern is hardcoded in their code. re_path( (", "without checking # with Fenix first, the pattern is hardcoded in their code.", "= [ # /<locale>/<app>/file/<id>/filename.xpi # /<locale>/<app>/file/<id>/type:attachment/filename.xpi # See comment in File.get_url_path(): do not", "checking # with Fenix first, the pattern is hardcoded in their code. re_path(", "ADDON_ID from olympia.amo.views import frontend_view from . import views urlpatterns = [ re_path(r'^$',", "frontend_view, name='addons.versions'), re_path( r'^(?P<version_num>[^/]+)/updateinfo/$', views.update_info, name='addons.versions.update_info', ), ] download_patterns = [ # /<locale>/<app>/file/<id>/filename.xpi", "), ] download_patterns = [ # /<locale>/<app>/file/<id>/filename.xpi # /<locale>/<app>/file/<id>/type:attachment/filename.xpi # See comment in", "is ignored though. re_path( ( r'^latest/%s/' r'(?:type:(?P<download_type>\\w+)/)?' r'(?:platform:(?P<platform>\\d+)/)?' r'(?:(?P<filename>[\\w+.-]*))?$' ) % ADDON_ID, views.download_latest,", "import frontend_view from . import views urlpatterns = [ re_path(r'^$', frontend_view, name='addons.versions'), re_path(", "), # /latest/<id>/type:xpi/platform:5/lol.xpi - everything after the addon id # is ignored though.", "re_path( r'^source/(?P<version_id>\\d+)', views.download_source, name='downloads.source' ), # /latest/<id>/type:xpi/platform:5/lol.xpi - everything after the addon id", "the addon id # is ignored though. re_path( ( r'^latest/%s/' r'(?:type:(?P<download_type>\\w+)/)?' r'(?:platform:(?P<platform>\\d+)/)?' r'(?:(?P<filename>[\\w+.-]*))?$'", "name='downloads.source' ), # /latest/<id>/type:xpi/platform:5/lol.xpi - everything after the addon id # is ignored", "the pattern is hardcoded in their code. re_path( ( r'^file/(?P<file_id>\\d+)/' r'(?:type:(?P<download_type>\\w+)/)?' r'(?:(?P<filename>[\\w+.-]*))?$' ),", "import views urlpatterns = [ re_path(r'^$', frontend_view, name='addons.versions'), re_path( r'^(?P<version_num>[^/]+)/updateinfo/$', views.update_info, name='addons.versions.update_info', ),", "/latest/<id>/type:xpi/platform:5/lol.xpi - everything after the addon id # is ignored though. re_path( (", "import ADDON_ID from olympia.amo.views import frontend_view from . import views urlpatterns = [", "is hardcoded in their code. re_path( ( r'^file/(?P<file_id>\\d+)/' r'(?:type:(?P<download_type>\\w+)/)?' r'(?:(?P<filename>[\\w+.-]*))?$' ), views.download_file, name='downloads.file',", "import re_path from olympia.addons.urls import ADDON_ID from olympia.amo.views import frontend_view from . 
import", "re_path( ( r'^file/(?P<file_id>\\d+)/' r'(?:type:(?P<download_type>\\w+)/)?' r'(?:(?P<filename>[\\w+.-]*))?$' ), views.download_file, name='downloads.file', ), re_path( r'^source/(?P<version_id>\\d+)', views.download_source, name='downloads.source'", "frontend_view from . import views urlpatterns = [ re_path(r'^$', frontend_view, name='addons.versions'), re_path( r'^(?P<version_num>[^/]+)/updateinfo/$',", "everything after the addon id # is ignored though. re_path( ( r'^latest/%s/' r'(?:type:(?P<download_type>\\w+)/)?'", "their code. re_path( ( r'^file/(?P<file_id>\\d+)/' r'(?:type:(?P<download_type>\\w+)/)?' r'(?:(?P<filename>[\\w+.-]*))?$' ), views.download_file, name='downloads.file', ), re_path( r'^source/(?P<version_id>\\d+)',", "[ re_path(r'^$', frontend_view, name='addons.versions'), re_path( r'^(?P<version_num>[^/]+)/updateinfo/$', views.update_info, name='addons.versions.update_info', ), ] download_patterns = [", ". import views urlpatterns = [ re_path(r'^$', frontend_view, name='addons.versions'), re_path( r'^(?P<version_num>[^/]+)/updateinfo/$', views.update_info, name='addons.versions.update_info',", "views.download_source, name='downloads.source' ), # /latest/<id>/type:xpi/platform:5/lol.xpi - everything after the addon id # is", "r'(?:(?P<filename>[\\w+.-]*))?$' ), views.download_file, name='downloads.file', ), re_path( r'^source/(?P<version_id>\\d+)', views.download_source, name='downloads.source' ), # /latest/<id>/type:xpi/platform:5/lol.xpi -", "# See comment in File.get_url_path(): do not change this without checking # with", "re_path( r'^(?P<version_num>[^/]+)/updateinfo/$', views.update_info, name='addons.versions.update_info', ), ] download_patterns = [ # /<locale>/<app>/file/<id>/filename.xpi # /<locale>/<app>/file/<id>/type:attachment/filename.xpi", "name='downloads.file', ), re_path( r'^source/(?P<version_id>\\d+)', views.download_source, name='downloads.source' ), # /latest/<id>/type:xpi/platform:5/lol.xpi - everything after the", "views.download_file, name='downloads.file', ), re_path( r'^source/(?P<version_id>\\d+)', views.download_source, name='downloads.source' ), # /latest/<id>/type:xpi/platform:5/lol.xpi - everything after", "after the addon id # is ignored though. re_path( ( r'^latest/%s/' r'(?:type:(?P<download_type>\\w+)/)?' r'(?:platform:(?P<platform>\\d+)/)?'", "hardcoded in their code. re_path( ( r'^file/(?P<file_id>\\d+)/' r'(?:type:(?P<download_type>\\w+)/)?' r'(?:(?P<filename>[\\w+.-]*))?$' ), views.download_file, name='downloads.file', ),", "# /<locale>/<app>/file/<id>/type:attachment/filename.xpi # See comment in File.get_url_path(): do not change this without checking" ]
[ "range(2015, 2415): if isLeap(i): y += 366 else: y += 365 if y", "year % 400 == 0: return True else: return False leap400 = 0", "isLeap(year): if (year % 4 == 0 and year % 100 != 0)", "[] y = 0 for i in range(2015, 2415): if isLeap(i): y +=", "7 == 0: leap400 += 1 leap400List.append(leap400) print(leap400 * ((N-2014) // 400) +", "y += 366 else: y += 365 if y % 7 == 0:", "!= 0) or year % 400 == 0: return True else: return False", "+= 366 else: y += 365 if y % 7 == 0: leap400", "year % 100 != 0) or year % 400 == 0: return True", "0 for i in range(2015, 2415): if isLeap(i): y += 366 else: y", "and year % 100 != 0) or year % 400 == 0: return", "else: return False leap400 = 0 leap400List = [] y = 0 for", "if (year % 4 == 0 and year % 100 != 0) or", "y = 0 for i in range(2015, 2415): if isLeap(i): y += 366", "= 0 leap400List = [] y = 0 for i in range(2015, 2415):", "+= 365 if y % 7 == 0: leap400 += 1 leap400List.append(leap400) print(leap400", "0: return True else: return False leap400 = 0 leap400List = [] y", "else: y += 365 if y % 7 == 0: leap400 += 1", "== 0: leap400 += 1 leap400List.append(leap400) print(leap400 * ((N-2014) // 400) + leap400List[(N-2014)%400-1])", "(year % 4 == 0 and year % 100 != 0) or year", "== 0: return True else: return False leap400 = 0 leap400List = []", "2415): if isLeap(i): y += 366 else: y += 365 if y %", "leap400List = [] y = 0 for i in range(2015, 2415): if isLeap(i):", "N = int(input()) def isLeap(year): if (year % 4 == 0 and year", "= int(input()) def isLeap(year): if (year % 4 == 0 and year %", "i in range(2015, 2415): if isLeap(i): y += 366 else: y += 365", "% 100 != 0) or year % 400 == 0: return True else:", "% 400 == 0: return True else: return False leap400 = 0 leap400List", "y % 7 == 0: leap400 += 1 leap400List.append(leap400) print(leap400 * ((N-2014) //", "4 == 0 and year % 100 != 0) or year % 400", "int(input()) def isLeap(year): if (year % 4 == 0 and year % 100", "100 != 0) or year % 400 == 0: return True else: return", "% 7 == 0: leap400 += 1 leap400List.append(leap400) print(leap400 * ((N-2014) // 400)", "== 0 and year % 100 != 0) or year % 400 ==", "= 0 for i in range(2015, 2415): if isLeap(i): y += 366 else:", "True else: return False leap400 = 0 leap400List = [] y = 0", "y += 365 if y % 7 == 0: leap400 += 1 leap400List.append(leap400)", "400 == 0: return True else: return False leap400 = 0 leap400List =", "def isLeap(year): if (year % 4 == 0 and year % 100 !=", "if isLeap(i): y += 366 else: y += 365 if y % 7", "0 leap400List = [] y = 0 for i in range(2015, 2415): if", "0 and year % 100 != 0) or year % 400 == 0:", "= [] y = 0 for i in range(2015, 2415): if isLeap(i): y", "0) or year % 400 == 0: return True else: return False leap400", "leap400 = 0 leap400List = [] y = 0 for i in range(2015,", "or year % 400 == 0: return True else: return False leap400 =", "return True else: return False leap400 = 0 leap400List = [] y =", "isLeap(i): y += 366 else: y += 365 if y % 7 ==", "return False leap400 = 0 leap400List = [] y = 0 for i", "for i in range(2015, 2415): if isLeap(i): y += 366 else: y +=", "if y % 7 == 0: leap400 += 1 leap400List.append(leap400) print(leap400 * ((N-2014)", "366 else: y += 365 if y % 7 == 0: leap400 +=", "% 4 == 0 and year % 100 != 0) or year %", "365 if y % 7 == 0: leap400 += 1 leap400List.append(leap400) print(leap400 *", "False leap400 = 0 leap400List = [] y = 0 for i in", "in range(2015, 2415): if isLeap(i): y += 366 else: y += 365 if" ]
[ "class Solution(object): def threeSum(self, nums): \"\"\" :type nums: List[int] :rtype: List[List[int]] \"\"\" ans", "elif sum > 0: right -=1 else: ans.append([num_0, nums[left], nums[right]]) while left <", "and nums[i] == nums[i-1]: continue left = i+1 right = len(nums)-1 while left", "Solution(object): def threeSum(self, nums): \"\"\" :type nums: List[int] :rtype: List[List[int]] \"\"\" ans =", "nums): \"\"\" :type nums: List[int] :rtype: List[List[int]] \"\"\" ans = list() nums =", "2, -1, -4] solution = Solution() ans = solution.threeSum(nums) print(ans) if __name__=='__main__': test()", "nums[left], nums[right]]) while left < right and nums[left]==nums[left+1]: left +=1 while left <", "\"\"\" :type nums: List[int] :rtype: List[List[int]] \"\"\" ans = list() nums = sorted(nums)", "1 elif sum > 0: right -=1 else: ans.append([num_0, nums[left], nums[right]]) while left", "1 return ans def test(): nums = [-1, 0, 1, 2, -1, -4]", "i>0 and nums[i] == nums[i-1]: continue left = i+1 right = len(nums)-1 while", "> 0: right -=1 else: ans.append([num_0, nums[left], nums[right]]) while left < right and", "nums = sorted(nums) for i, num_0 in enumerate(nums): if i>0 and nums[i] ==", "< right: sum = num_0 + nums[left] + nums[right] if sum < 0:", "0, 1, 2, -1, -4] solution = Solution() ans = solution.threeSum(nums) print(ans) if", "right = len(nums)-1 while left < right: sum = num_0 + nums[left] +", "nums[right]==nums[right-1]: right -=1 left += 1 right -= 1 return ans def test():", "List[int] :rtype: List[List[int]] \"\"\" ans = list() nums = sorted(nums) for i, num_0", "nums[right] if sum < 0: left += 1 elif sum > 0: right", "nums[left]==nums[left+1]: left +=1 while left < right and nums[right]==nums[right-1]: right -=1 left +=", "left += 1 right -= 1 return ans def test(): nums = [-1,", "else: ans.append([num_0, nums[left], nums[right]]) while left < right and nums[left]==nums[left+1]: left +=1 while", "< right and nums[left]==nums[left+1]: left +=1 while left < right and nums[right]==nums[right-1]: right", "test(): nums = [-1, 0, 1, 2, -1, -4] solution = Solution() ans", "sorted(nums) for i, num_0 in enumerate(nums): if i>0 and nums[i] == nums[i-1]: continue", "-=1 left += 1 right -= 1 return ans def test(): nums =", "+ nums[left] + nums[right] if sum < 0: left += 1 elif sum", "== nums[i-1]: continue left = i+1 right = len(nums)-1 while left < right:", "right -=1 else: ans.append([num_0, nums[left], nums[right]]) while left < right and nums[left]==nums[left+1]: left", "= len(nums)-1 while left < right: sum = num_0 + nums[left] + nums[right]", "def threeSum(self, nums): \"\"\" :type nums: List[int] :rtype: List[List[int]] \"\"\" ans = list()", "if i>0 and nums[i] == nums[i-1]: continue left = i+1 right = len(nums)-1", "and nums[left]==nums[left+1]: left +=1 while left < right and nums[right]==nums[right-1]: right -=1 left", "and nums[right]==nums[right-1]: right -=1 left += 1 right -= 1 return ans def", "for i, num_0 in enumerate(nums): if i>0 and nums[i] == nums[i-1]: continue left", "0: left += 1 elif sum > 0: right -=1 else: ans.append([num_0, nums[left],", "+= 1 elif sum > 0: right -=1 else: ans.append([num_0, nums[left], nums[right]]) while", ":rtype: List[List[int]] \"\"\" ans = list() nums = sorted(nums) for i, num_0 in", "list() nums = sorted(nums) for i, num_0 in enumerate(nums): if i>0 and nums[i]", "if sum < 0: left += 1 elif sum > 0: right -=1", "right: sum = num_0 + nums[left] + nums[right] if sum < 0: left", "<reponame>JiJingYu/LeetCode_practice<gh_stars>0 class Solution(object): 
def threeSum(self, nums): \"\"\" :type nums: List[int] :rtype: List[List[int]] \"\"\"", "in enumerate(nums): if i>0 and nums[i] == nums[i-1]: continue left = i+1 right", "left < right and nums[left]==nums[left+1]: left +=1 while left < right and nums[right]==nums[right-1]:", "+=1 while left < right and nums[right]==nums[right-1]: right -=1 left += 1 right", "0: right -=1 else: ans.append([num_0, nums[left], nums[right]]) while left < right and nums[left]==nums[left+1]:", ":type nums: List[int] :rtype: List[List[int]] \"\"\" ans = list() nums = sorted(nums) for", "= sorted(nums) for i, num_0 in enumerate(nums): if i>0 and nums[i] == nums[i-1]:", "right -= 1 return ans def test(): nums = [-1, 0, 1, 2,", "= [-1, 0, 1, 2, -1, -4] solution = Solution() ans = solution.threeSum(nums)", "1 right -= 1 return ans def test(): nums = [-1, 0, 1,", "= list() nums = sorted(nums) for i, num_0 in enumerate(nums): if i>0 and", "right and nums[left]==nums[left+1]: left +=1 while left < right and nums[right]==nums[right-1]: right -=1", "left +=1 while left < right and nums[right]==nums[right-1]: right -=1 left += 1", "while left < right and nums[right]==nums[right-1]: right -=1 left += 1 right -=", "right -=1 left += 1 right -= 1 return ans def test(): nums", "[-1, 0, 1, 2, -1, -4] solution = Solution() ans = solution.threeSum(nums) print(ans)", "+ nums[right] if sum < 0: left += 1 elif sum > 0:", "right and nums[right]==nums[right-1]: right -=1 left += 1 right -= 1 return ans", "ans.append([num_0, nums[left], nums[right]]) while left < right and nums[left]==nums[left+1]: left +=1 while left", "nums = [-1, 0, 1, 2, -1, -4] solution = Solution() ans =", "nums[i-1]: continue left = i+1 right = len(nums)-1 while left < right: sum", "return ans def test(): nums = [-1, 0, 1, 2, -1, -4] solution", "+= 1 right -= 1 return ans def test(): nums = [-1, 0,", "nums[right]]) while left < right and nums[left]==nums[left+1]: left +=1 while left < right", "i+1 right = len(nums)-1 while left < right: sum = num_0 + nums[left]", "< 0: left += 1 elif sum > 0: right -=1 else: ans.append([num_0,", "-=1 else: ans.append([num_0, nums[left], nums[right]]) while left < right and nums[left]==nums[left+1]: left +=1", "ans def test(): nums = [-1, 0, 1, 2, -1, -4] solution =", "i, num_0 in enumerate(nums): if i>0 and nums[i] == nums[i-1]: continue left =", "nums[left] + nums[right] if sum < 0: left += 1 elif sum >", "< right and nums[right]==nums[right-1]: right -=1 left += 1 right -= 1 return", "-= 1 return ans def test(): nums = [-1, 0, 1, 2, -1,", "nums: List[int] :rtype: List[List[int]] \"\"\" ans = list() nums = sorted(nums) for i,", "nums[i] == nums[i-1]: continue left = i+1 right = len(nums)-1 while left <", "List[List[int]] \"\"\" ans = list() nums = sorted(nums) for i, num_0 in enumerate(nums):", "num_0 in enumerate(nums): if i>0 and nums[i] == nums[i-1]: continue left = i+1", "sum > 0: right -=1 else: ans.append([num_0, nums[left], nums[right]]) while left < right", "= i+1 right = len(nums)-1 while left < right: sum = num_0 +", "= num_0 + nums[left] + nums[right] if sum < 0: left += 1", "threeSum(self, nums): \"\"\" :type nums: List[int] :rtype: List[List[int]] \"\"\" ans = list() nums", "\"\"\" ans = list() nums = sorted(nums) for i, num_0 in enumerate(nums): if", "num_0 + nums[left] + nums[right] if sum < 0: left += 1 elif", "left < right: sum = num_0 + nums[left] + nums[right] if sum <", "while left < right: sum = num_0 + nums[left] + nums[right] if sum", "left += 1 elif sum > 0: right -=1 else: ans.append([num_0, nums[left], 
nums[right]])", "enumerate(nums): if i>0 and nums[i] == nums[i-1]: continue left = i+1 right =", "continue left = i+1 right = len(nums)-1 while left < right: sum =", "1, 2, -1, -4] solution = Solution() ans = solution.threeSum(nums) print(ans) if __name__=='__main__':", "sum = num_0 + nums[left] + nums[right] if sum < 0: left +=", "left < right and nums[right]==nums[right-1]: right -=1 left += 1 right -= 1", "while left < right and nums[left]==nums[left+1]: left +=1 while left < right and", "ans = list() nums = sorted(nums) for i, num_0 in enumerate(nums): if i>0", "len(nums)-1 while left < right: sum = num_0 + nums[left] + nums[right] if", "sum < 0: left += 1 elif sum > 0: right -=1 else:", "def test(): nums = [-1, 0, 1, 2, -1, -4] solution = Solution()", "left = i+1 right = len(nums)-1 while left < right: sum = num_0" ]
[ "ans}) elif command == \"com.example.commands.SqueezeBoxSpotifySearch\": ans = str(squeeze_controller.spotify_search_and_play(params)) speak(ans) log({'type': 'squeezebox response', 'message':", "speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxSearch\": ans = str(squeeze_controller.search_and_play(params))", "not device_model_id: raise Exception('Missing --device-model-id option') # Re-register if \"device_model_id\" is given by", "KIND, either express or implied. # See the License for the specific language", "language governing permissions and # limitations under the License. from __future__ import print_function", "Unless required by applicable law or agreed to in writing, software # distributed", "up\", \"device_model_id\": device_model_id, \"device_id\": device_id }) # Re-register if \"device_id\" is different from", "'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxPlayEnd\": ans = str(squeeze_controller.search_and_play_end(params)) speak(ans) log({'type':", "to write the log to') parser.add_argument('--home_control_credentials', type=str, required=True, help='path of home control credentials')", "= str(squeeze_controller.search_and_play(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxPlayNext\": ans", "'command': command, 'params': params}) try: if command == \"com.example.commands.SqueezeBoxCommand\": squeeze_controller.simple_command(params) elif command ==", "self.filename = filename def write(self, message): with open(self.filename, \"a\") as f: f.write(message) def", "= google.oauth2.credentials.Credentials(token=None, **json.load(f)) device_model_id = None last_device_id = None try: with open(args.device_config) as", "me \" + x) def main(): parser = argparse.ArgumentParser( formatter_class=argparse.RawTextHelpFormatter) parser.add_argument('--device-model-id', '--device_model_id', type=str,", "EventType.ON_RESPONDING_FINISHED]: squeeze_controller.return_volume() log({'type': 'return volume'}) def setup_controllers(credentials_path): global squeeze_controller with open(credentials_path, \"r\") as", "as squeezebox import sys import datetime try: FileNotFoundError except NameError: FileNotFoundError = IOError", "write(self, message): with open(self.filename, \"a\") as f: f.write(message) def flush(self): pass def log(x):", "this file except in compliance with the License. 
# You may obtain a", "in event.actions: log({'type': 'device action', 'command': command, 'params': params}) try: if command ==", "args.device_model_id or device_model_id with Assistant(credentials, device_model_id) as assistant: events = assistant.start() device_id =", "= assistant.device_id log({ \"type\": \"starting up\", \"device_model_id\": device_model_id, \"device_id\": device_id }) # Re-register", "store and read device configuration') parser.add_argument('--credentials', type=existing_file, metavar='OAUTH2_CREDENTIALS_FILE', default=os.path.join( os.path.expanduser('~/.config'), 'google-oauthlib-tool', 'credentials.json' ),", "flush(self): pass def log(x): now = datetime.datetime.now().strftime('%F_%X') x['time'] = now print(x) def process_event(event):", "\"com.example.commands.SqueezeBoxQuery\": ans = str(squeeze_controller.simple_query(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command ==", "args = parser.parse_args() if args.logfile: sys.stdout = sys.stderr = Logger(args.logfile) with open(args.credentials, 'r')", "ANY KIND, either express or implied. # See the License for the specific", "or device_model_id with Assistant(credentials, device_model_id) as assistant: events = assistant.start() device_id = assistant.device_id", "+ Assistant.__version_str__()) parser.add_argument('--logfile', type=str, required=False, help='file to write the log to') parser.add_argument('--home_control_credentials', type=str,", "metavar='OAUTH2_CREDENTIALS_FILE', default=os.path.join( os.path.expanduser('~/.config'), 'google-oauthlib-tool', 'credentials.json' ), help='path to store and read OAuth2 credentials')", "command == \"com.example.commands.SqueezeBoxCommand\": squeeze_controller.simple_command(params) elif command == \"com.example.commands.SqueezeBoxQuery\": ans = str(squeeze_controller.simple_query(params)) speak(ans) log({'type':", "== EventType.ON_DEVICE_ACTION: for command, params in event.actions: log({'type': 'device action', 'command': command, 'params':", "event.type == EventType.ON_RECOGNIZING_SPEECH_FINISHED: log({'type': 'speech', 'text': event.args['text']}) elif event.type == EventType.ON_RENDER_RESPONSE: log({'type': 'google", "sys.stderr = Logger(args.logfile) with open(args.credentials, 'r') as f: credentials = google.oauth2.credentials.Credentials(token=None, **json.load(f)) device_model_id", "json.dump({ 'last_device_id': device_id, 'model_id': device_model_id, }, f) else: print(WARNING_NOT_REGISTERED) setup_controllers(args.home_control_credentials) setup_speech(assistant) for event", "command, 'params': params}) try: if command == \"com.example.commands.SqueezeBoxCommand\": squeeze_controller.simple_command(params) elif command == \"com.example.commands.SqueezeBoxQuery\":", "assistant_squeezebox_controller as squeezebox import sys import datetime try: FileNotFoundError except NameError: FileNotFoundError =", "args.project_id: register_device(args.project_id, credentials, device_model_id, device_id) pathlib.Path(os.path.dirname(args.device_config)).mkdir( exist_ok=True) with open(args.device_config, 'w') as f: json.dump({", "\"device_id\" is different from the last \"device_id\": if should_register or (device_id != last_device_id):", "ans = str(squeeze_controller.search_and_play(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxPlayNext\":", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See", "# Copyright (C) 2017 Google Inc. 
# # Licensed under the Apache License,", "= None try: with open(args.device_config) as f: device_config = json.load(f) device_model_id = device_config['model_id']", "'message': ans}) elif command == \"com.example.commands.SqueezeBoxSearch\": ans = str(squeeze_controller.search_and_play(params)) speak(ans) log({'type': 'squeezebox response',", "if event.type in [EventType.ON_CONVERSATION_TURN_STARTED, EventType.ON_RESPONDING_STARTED]: squeeze_controller.quiet() log({'type': 'quiet'}) elif event.type in [EventType.ON_END_OF_UTTERANCE, EventType.ON_RESPONDING_FINISHED]:", "help='path to store and read device configuration') parser.add_argument('--credentials', type=existing_file, metavar='OAUTH2_CREDENTIALS_FILE', default=os.path.join( os.path.expanduser('~/.config'), 'google-oauthlib-tool',", "specific language governing permissions and # limitations under the License. from __future__ import", "ans}) elif command == \"com.example.commands.SqueezeBoxPlayEnd\": ans = str(squeeze_controller.search_and_play_end(params)) speak(ans) log({'type': 'squeezebox response', 'message':", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "pass if not args.device_model_id and not device_model_id: raise Exception('Missing --device-model-id option') # Re-register", "event.type == EventType.ON_CONVERSATION_TURN_STARTED: log({'type': 'listening'}) if event.type in [EventType.ON_CONVERSATION_TURN_STARTED, EventType.ON_RESPONDING_STARTED]: squeeze_controller.quiet() log({'type': 'quiet'})", "device_id, 'model_id': device_model_id, }, f) else: print(WARNING_NOT_REGISTERED) setup_controllers(args.home_control_credentials) setup_speech(assistant) for event in events:", "as f: credentials = google.oauth2.credentials.Credentials(token=None, **json.load(f)) device_model_id = None last_device_id = None try:", "from google.assistant.library.file_helpers import existing_file from google.assistant.library.device_helpers import register_device import assistant_squeezebox_controller as squeezebox import", "different from the last \"device_id\": if should_register or (device_id != last_device_id): if args.project_id:", "OF ANY KIND, either express or implied. 
# See the License for the", "= str(squeeze_controller.search_and_play_end(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxSpotifySearch\": ans", "elif command == \"com.example.commands.SqueezeBoxSpotifySearch\": ans = str(squeeze_controller.spotify_search_and_play(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans})", "log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxVolume\": squeeze_controller.set_volume(params) elif command ==", "args.device_model_id and args.device_model_id != device_model_id) device_model_id = args.device_model_id or device_model_id with Assistant(credentials, device_model_id)", "import json import os.path import pathlib2 as pathlib import google.oauth2.credentials from google.assistant.library import", "json.loads(f.read()) squeeze_controller = squeezebox.AssistantSqueezeBoxController(creds['squeezebox_server']['ip'], creds['squeezebox_server']['port'], main_squeezebox=creds['nearest_squeezebox']) def setup_speech(assistant): global speak def speak(x): assistant.send_text_query(\"repeat", "= str(e) speak(e) log({'type': 'exception', 'message': e}) elif event.type == EventType.ON_RECOGNIZING_SPEECH_FINISHED: log({'type': 'speech',", "response', 'message': e}) except Exception as e: e = str(e) speak(e) log({'type': 'exception',", "== \"com.example.commands.SqueezeBoxSearch\": ans = str(squeeze_controller.search_and_play(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command", "Assistant.__version_str__()) parser.add_argument('--logfile', type=str, required=False, help='file to write the log to') parser.add_argument('--home_control_credentials', type=str, required=True,", "device_model_id = args.device_model_id or device_model_id with Assistant(credentials, device_model_id) as assistant: events = assistant.start()", "parser.add_argument('--logfile', type=str, required=False, help='file to write the log to') parser.add_argument('--home_control_credentials', type=str, required=True, help='path", "parser.add_argument('--project-id', '--project_id', type=str, metavar='PROJECT_ID', required=False, help='the project ID used to register this device')", "'last_device_id': device_id, 'model_id': device_model_id, }, f) else: print(WARNING_NOT_REGISTERED) setup_controllers(args.home_control_credentials) setup_speech(assistant) for event in", "device in Assistant Settings. In order to register this device follow instructions at:", "metavar='PROJECT_ID', required=False, help='the project ID used to register this device') parser.add_argument('--device-config', type=str, metavar='DEVICE_CONFIG_FILE',", "== \"com.example.commands.SqueezeBoxPlayEnd\": ans = str(squeeze_controller.search_and_play_end(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command", "log(x): now = datetime.datetime.now().strftime('%F_%X') x['time'] = now print(x) def process_event(event): \"\"\" Args: event(event.Event):", "and read device configuration') parser.add_argument('--credentials', type=existing_file, metavar='OAUTH2_CREDENTIALS_FILE', default=os.path.join( os.path.expanduser('~/.config'), 'google-oauthlib-tool', 'credentials.json' ), help='path", "Actions or see your device in Assistant Settings. 
In order to register this", "global speak def speak(x): assistant.send_text_query(\"repeat after me \" + x) def main(): parser", "now print(x) def process_event(event): \"\"\" Args: event(event.Event): The current event to process. \"\"\"", "import pathlib2 as pathlib import google.oauth2.credentials from google.assistant.library import Assistant from google.assistant.library.event import", "'message': ans}) elif command == \"com.example.commands.SqueezeBoxVolume\": squeeze_controller.set_volume(params) elif command == \"com.example.commands.SqueezeBoxSleep\": squeeze_controller.sleep_in(params) elif", "!= last_device_id): if args.project_id: register_device(args.project_id, credentials, device_model_id, device_id) pathlib.Path(os.path.dirname(args.device_config)).mkdir( exist_ok=True) with open(args.device_config, 'w')", "}, f) else: print(WARNING_NOT_REGISTERED) setup_controllers(args.home_control_credentials) setup_speech(assistant) for event in events: process_event(event) if __name__", "== \"com.example.commands.SqueezeBoxSync\": squeeze_controller.sync_player(params) elif command == \"com.example.commands.SqueezeBoxRadio4\": squeeze_controller.play_radio4(params) except squeezebox.UserException as e: e", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "ans}) elif command == \"com.example.commands.SqueezeBoxVolume\": squeeze_controller.set_volume(params) elif command == \"com.example.commands.SqueezeBoxSleep\": squeeze_controller.sleep_in(params) elif command", "with open(args.device_config, 'w') as f: json.dump({ 'last_device_id': device_id, 'model_id': device_model_id, }, f) else:", "type=str, metavar='DEVICE_CONFIG_FILE', default=os.path.join( os.path.expanduser('~/.config'), 'googlesamples-assistant', 'device_config_library.json' ), help='path to store and read device", "\"\"\" if event.type == EventType.ON_DEVICE_ACTION: for command, params in event.actions: log({'type': 'device action',", "credentials') parser.add_argument('-v', '--version', action='version', version='%(prog)s ' + Assistant.__version_str__()) parser.add_argument('--logfile', type=str, required=False, help='file to", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "setup_speech(assistant): global speak def speak(x): assistant.send_text_query(\"repeat after me \" + x) def main():", "== \"com.example.commands.SqueezeBoxSleep\": squeeze_controller.sleep_in(params) elif command == \"com.example.commands.SqueezeBoxSendMusic\": squeeze_controller.send_music(params) elif command == \"com.example.commands.SqueezeBoxSync\": squeeze_controller.sync_player(params)", "device_config.get('last_device_id', None) except FileNotFoundError: pass if not args.device_model_id and not device_model_id: raise Exception('Missing", "'squeezebox response', 'message': e}) except Exception as e: e = str(e) speak(e) log({'type':", "log({'type': 'google response', 'text': event.args['text']}) elif event.type == EventType.ON_CONVERSATION_TURN_STARTED: log({'type': 'listening'}) if event.type", "parser.add_argument('--device-model-id', '--device_model_id', type=str, metavar='DEVICE_MODEL_ID', required=False, help='the device model ID registered with Google') parser.add_argument('--project-id',", "EventType.ON_CONVERSATION_TURN_STARTED: log({'type': 'listening'}) if event.type in [EventType.ON_CONVERSATION_TURN_STARTED, EventType.ON_RESPONDING_STARTED]: squeeze_controller.quiet() log({'type': 'quiet'}) elif event.type", "as f: creds = json.loads(f.read()) 
squeeze_controller = squeezebox.AssistantSqueezeBoxController(creds['squeezebox_server']['ip'], creds['squeezebox_server']['port'], main_squeezebox=creds['nearest_squeezebox']) def setup_speech(assistant): global", "squeeze_controller.set_volume(params) elif command == \"com.example.commands.SqueezeBoxSleep\": squeeze_controller.sleep_in(params) elif command == \"com.example.commands.SqueezeBoxSendMusic\": squeeze_controller.send_music(params) elif command", "squeeze_controller.sync_player(params) elif command == \"com.example.commands.SqueezeBoxRadio4\": squeeze_controller.play_radio4(params) except squeezebox.UserException as e: e = str(e)", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "None try: with open(args.device_config) as f: device_config = json.load(f) device_model_id = device_config['model_id'] last_device_id", "as f: device_config = json.load(f) device_model_id = device_config['model_id'] last_device_id = device_config.get('last_device_id', None) except", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "ans = str(squeeze_controller.search_and_play_end(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxSpotifySearch\":", "speak def speak(x): assistant.send_text_query(\"repeat after me \" + x) def main(): parser =", "if \"device_model_id\" is given by the user and it differs # from what", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "os.path.expanduser('~/.config'), 'googlesamples-assistant', 'device_config_library.json' ), help='path to store and read device configuration') parser.add_argument('--credentials', type=existing_file,", "# Re-register if \"device_id\" is different from the last \"device_id\": if should_register or", "except squeezebox.UserException as e: e = str(e) speak(e) log({'type': 'squeezebox response', 'message': e})", "the License. from __future__ import print_function import argparse import json import os.path import", "device_id) pathlib.Path(os.path.dirname(args.device_config)).mkdir( exist_ok=True) with open(args.device_config, 'w') as f: json.dump({ 'last_device_id': device_id, 'model_id': device_model_id,", "device is not registered. This means you will not be able to use", "squeeze_controller.return_volume() log({'type': 'return volume'}) def setup_controllers(credentials_path): global squeeze_controller with open(credentials_path, \"r\") as f:", "required by applicable law or agreed to in writing, software # distributed under", "None) except FileNotFoundError: pass if not args.device_model_id and not device_model_id: raise Exception('Missing --device-model-id", "as e: e = str(e) speak(e) log({'type': 'squeezebox response', 'message': e}) except Exception", "log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxSearch\": ans = str(squeeze_controller.search_and_play(params)) speak(ans)", "\"\"\" Args: event(event.Event): The current event to process. 
\"\"\" if event.type == EventType.ON_DEVICE_ACTION:", "applicable law or agreed to in writing, software # distributed under the License", "Google') parser.add_argument('--project-id', '--project_id', type=str, metavar='PROJECT_ID', required=False, help='the project ID used to register this", "main_squeezebox=creds['nearest_squeezebox']) def setup_speech(assistant): global speak def speak(x): assistant.send_text_query(\"repeat after me \" + x)", "device_id }) # Re-register if \"device_id\" is different from the last \"device_id\": if", "device') parser.add_argument('--device-config', type=str, metavar='DEVICE_CONFIG_FILE', default=os.path.join( os.path.expanduser('~/.config'), 'googlesamples-assistant', 'device_config_library.json' ), help='path to store and", "type=str, required=True, help='path of home control credentials') args = parser.parse_args() if args.logfile: sys.stdout", "datetime try: FileNotFoundError except NameError: FileNotFoundError = IOError WARNING_NOT_REGISTERED = \"\"\" This device", "or agreed to in writing, software # distributed under the License is distributed", "print(x) def process_event(event): \"\"\" Args: event(event.Event): The current event to process. \"\"\" if", "limitations under the License. from __future__ import print_function import argparse import json import", "filename): self.filename = filename def write(self, message): with open(self.filename, \"a\") as f: f.write(message)", "assistant: events = assistant.start() device_id = assistant.device_id log({ \"type\": \"starting up\", \"device_model_id\": device_model_id,", "str(squeeze_controller.spotify_search_and_play(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxVolume\": squeeze_controller.set_volume(params) elif", "google.assistant.library import Assistant from google.assistant.library.event import EventType from google.assistant.library.file_helpers import existing_file from google.assistant.library.device_helpers", "import assistant_squeezebox_controller as squeezebox import sys import datetime try: FileNotFoundError except NameError: FileNotFoundError", "home control credentials') args = parser.parse_args() if args.logfile: sys.stdout = sys.stderr = Logger(args.logfile)", "CONDITIONS OF ANY KIND, either express or implied. 
# See the License for", "parser.add_argument('--home_control_credentials', type=str, required=True, help='path of home control credentials') args = parser.parse_args() if args.logfile:", "\"com.example.commands.SqueezeBoxSendMusic\": squeeze_controller.send_music(params) elif command == \"com.example.commands.SqueezeBoxSync\": squeeze_controller.sync_player(params) elif command == \"com.example.commands.SqueezeBoxRadio4\": squeeze_controller.play_radio4(params) except", "now = datetime.datetime.now().strftime('%F_%X') x['time'] = now print(x) def process_event(event): \"\"\" Args: event(event.Event): The", "required=False, help='the device model ID registered with Google') parser.add_argument('--project-id', '--project_id', type=str, metavar='PROJECT_ID', required=False,", "squeeze_controller.send_music(params) elif command == \"com.example.commands.SqueezeBoxSync\": squeeze_controller.sync_player(params) elif command == \"com.example.commands.SqueezeBoxRadio4\": squeeze_controller.play_radio4(params) except squeezebox.UserException", "'text': event.args['text']}) elif event.type == EventType.ON_RENDER_RESPONSE: log({'type': 'google response', 'text': event.args['text']}) elif event.type", "as f: f.write(message) def flush(self): pass def log(x): now = datetime.datetime.now().strftime('%F_%X') x['time'] =", "NameError: FileNotFoundError = IOError WARNING_NOT_REGISTERED = \"\"\" This device is not registered. This", "response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxPlayEnd\": ans = str(squeeze_controller.search_and_play_end(params)) speak(ans) log({'type': 'squeezebox", "is not registered. This means you will not be able to use Device", "governing permissions and # limitations under the License. from __future__ import print_function import", "elif command == \"com.example.commands.SqueezeBoxPlayEnd\": ans = str(squeeze_controller.search_and_play_end(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans})", "class Logger(object): def __init__(self, filename): self.filename = filename def write(self, message): with open(self.filename,", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "event.args['text']}) elif event.type == EventType.ON_RENDER_RESPONSE: log({'type': 'google response', 'text': event.args['text']}) elif event.type ==", "assistant.send_text_query(\"repeat after me \" + x) def main(): parser = argparse.ArgumentParser( formatter_class=argparse.RawTextHelpFormatter) parser.add_argument('--device-model-id',", "writing, software # distributed under the License is distributed on an \"AS IS\"", "speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxSpotifySearch\": ans = str(squeeze_controller.spotify_search_and_play(params))", "== \"com.example.commands.SqueezeBoxSpotifySearch\": ans = str(squeeze_controller.spotify_search_and_play(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command", "pass def log(x): now = datetime.datetime.now().strftime('%F_%X') x['time'] = now print(x) def process_event(event): \"\"\"", "'return volume'}) def setup_controllers(credentials_path): global squeeze_controller with open(credentials_path, \"r\") as f: creds =", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "squeeze_controller.quiet() log({'type': 'quiet'}) elif event.type in [EventType.ON_END_OF_UTTERANCE, EventType.ON_RESPONDING_FINISHED]: squeeze_controller.return_volume() log({'type': 
'return volume'}) def", "device_id = assistant.device_id log({ \"type\": \"starting up\", \"device_model_id\": device_model_id, \"device_id\": device_id }) #", "License. # You may obtain a copy of the License at # #", "credentials, device_model_id, device_id) pathlib.Path(os.path.dirname(args.device_config)).mkdir( exist_ok=True) with open(args.device_config, 'w') as f: json.dump({ 'last_device_id': device_id,", "permissions and # limitations under the License. from __future__ import print_function import argparse", "EventType.ON_RECOGNIZING_SPEECH_FINISHED: log({'type': 'speech', 'text': event.args['text']}) elif event.type == EventType.ON_RENDER_RESPONSE: log({'type': 'google response', 'text':", "device follow instructions at: https://developers.google.com/assistant/sdk/guides/library/python/embed/register-device \"\"\" class Logger(object): def __init__(self, filename): self.filename =", "'listening'}) if event.type in [EventType.ON_CONVERSATION_TURN_STARTED, EventType.ON_RESPONDING_STARTED]: squeeze_controller.quiet() log({'type': 'quiet'}) elif event.type in [EventType.ON_END_OF_UTTERANCE,", "global squeeze_controller with open(credentials_path, \"r\") as f: creds = json.loads(f.read()) squeeze_controller = squeezebox.AssistantSqueezeBoxController(creds['squeezebox_server']['ip'],", "= json.loads(f.read()) squeeze_controller = squeezebox.AssistantSqueezeBoxController(creds['squeezebox_server']['ip'], creds['squeezebox_server']['port'], main_squeezebox=creds['nearest_squeezebox']) def setup_speech(assistant): global speak def speak(x):", "and it differs # from what we previously registered with. should_register = (", "log({'type': 'return volume'}) def setup_controllers(credentials_path): global squeeze_controller with open(credentials_path, \"r\") as f: creds", "in [EventType.ON_CONVERSATION_TURN_STARTED, EventType.ON_RESPONDING_STARTED]: squeeze_controller.quiet() log({'type': 'quiet'}) elif event.type in [EventType.ON_END_OF_UTTERANCE, EventType.ON_RESPONDING_FINISHED]: squeeze_controller.return_volume() log({'type':", "credentials') args = parser.parse_args() if args.logfile: sys.stdout = sys.stderr = Logger(args.logfile) with open(args.credentials,", "compliance with the License. 
# You may obtain a copy of the License", "filename def write(self, message): with open(self.filename, \"a\") as f: f.write(message) def flush(self): pass", "to store and read OAuth2 credentials') parser.add_argument('-v', '--version', action='version', version='%(prog)s ' + Assistant.__version_str__())", "'text': event.args['text']}) elif event.type == EventType.ON_CONVERSATION_TURN_STARTED: log({'type': 'listening'}) if event.type in [EventType.ON_CONVERSATION_TURN_STARTED, EventType.ON_RESPONDING_STARTED]:", "the last \"device_id\": if should_register or (device_id != last_device_id): if args.project_id: register_device(args.project_id, credentials,", "str(squeeze_controller.simple_query(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxSearch\": ans =", "json.load(f) device_model_id = device_config['model_id'] last_device_id = device_config.get('last_device_id', None) except FileNotFoundError: pass if not", "datetime.datetime.now().strftime('%F_%X') x['time'] = now print(x) def process_event(event): \"\"\" Args: event(event.Event): The current event", "}) # Re-register if \"device_id\" is different from the last \"device_id\": if should_register", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "import Assistant from google.assistant.library.event import EventType from google.assistant.library.file_helpers import existing_file from google.assistant.library.device_helpers import", "'googlesamples-assistant', 'device_config_library.json' ), help='path to store and read device configuration') parser.add_argument('--credentials', type=existing_file, metavar='OAUTH2_CREDENTIALS_FILE',", "== \"com.example.commands.SqueezeBoxRadio4\": squeeze_controller.play_radio4(params) except squeezebox.UserException as e: e = str(e) speak(e) log({'type': 'squeezebox", "\"starting up\", \"device_model_id\": device_model_id, \"device_id\": device_id }) # Re-register if \"device_id\" is different", "except FileNotFoundError: pass if not args.device_model_id and not device_model_id: raise Exception('Missing --device-model-id option')", "e: e = str(e) speak(e) log({'type': 'exception', 'message': e}) elif event.type == EventType.ON_RECOGNIZING_SPEECH_FINISHED:", "read device configuration') parser.add_argument('--credentials', type=existing_file, metavar='OAUTH2_CREDENTIALS_FILE', default=os.path.join( os.path.expanduser('~/.config'), 'google-oauthlib-tool', 'credentials.json' ), help='path to", "squeeze_controller.play_radio4(params) except squeezebox.UserException as e: e = str(e) speak(e) log({'type': 'squeezebox response', 'message':", "sys import datetime try: FileNotFoundError except NameError: FileNotFoundError = IOError WARNING_NOT_REGISTERED = \"\"\"", "command == \"com.example.commands.SqueezeBoxSendMusic\": squeeze_controller.send_music(params) elif command == \"com.example.commands.SqueezeBoxSync\": squeeze_controller.sync_player(params) elif command == \"com.example.commands.SqueezeBoxRadio4\":", "= json.load(f) device_model_id = device_config['model_id'] last_device_id = device_config.get('last_device_id', None) except FileNotFoundError: pass if", "your device in Assistant Settings. 
In order to register this device follow instructions", "and not device_model_id: raise Exception('Missing --device-model-id option') # Re-register if \"device_model_id\" is given", "pathlib.Path(os.path.dirname(args.device_config)).mkdir( exist_ok=True) with open(args.device_config, 'w') as f: json.dump({ 'last_device_id': device_id, 'model_id': device_model_id, },", "as assistant: events = assistant.start() device_id = assistant.device_id log({ \"type\": \"starting up\", \"device_model_id\":", "response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxSpotifySearch\": ans = str(squeeze_controller.spotify_search_and_play(params)) speak(ans) log({'type': 'squeezebox", "should_register = ( args.device_model_id and args.device_model_id != device_model_id) device_model_id = args.device_model_id or device_model_id", "not use this file except in compliance with the License. # You may", "required=False, help='the project ID used to register this device') parser.add_argument('--device-config', type=str, metavar='DEVICE_CONFIG_FILE', default=os.path.join(", "or see your device in Assistant Settings. In order to register this device", "x) def main(): parser = argparse.ArgumentParser( formatter_class=argparse.RawTextHelpFormatter) parser.add_argument('--device-model-id', '--device_model_id', type=str, metavar='DEVICE_MODEL_ID', required=False, help='the", "FileNotFoundError: pass if not args.device_model_id and not device_model_id: raise Exception('Missing --device-model-id option') #", "command == \"com.example.commands.SqueezeBoxQuery\": ans = str(squeeze_controller.simple_query(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif", "\"a\") as f: f.write(message) def flush(self): pass def log(x): now = datetime.datetime.now().strftime('%F_%X') x['time']", "'message': e}) elif event.type == EventType.ON_RECOGNIZING_SPEECH_FINISHED: log({'type': 'speech', 'text': event.args['text']}) elif event.type ==", "try: with open(args.device_config) as f: device_config = json.load(f) device_model_id = device_config['model_id'] last_device_id =", "EventType from google.assistant.library.file_helpers import existing_file from google.assistant.library.device_helpers import register_device import assistant_squeezebox_controller as squeezebox", "License, Version 2.0 (the \"License\"); # you may not use this file except", "__future__ import print_function import argparse import json import os.path import pathlib2 as pathlib", "able to use Device Actions or see your device in Assistant Settings. In", "assistant.device_id log({ \"type\": \"starting up\", \"device_model_id\": device_model_id, \"device_id\": device_id }) # Re-register if", "Assistant Settings. 
In order to register this device follow instructions at: https://developers.google.com/assistant/sdk/guides/library/python/embed/register-device \"\"\"", "metavar='DEVICE_CONFIG_FILE', default=os.path.join( os.path.expanduser('~/.config'), 'googlesamples-assistant', 'device_config_library.json' ), help='path to store and read device configuration')", "+ x) def main(): parser = argparse.ArgumentParser( formatter_class=argparse.RawTextHelpFormatter) parser.add_argument('--device-model-id', '--device_model_id', type=str, metavar='DEVICE_MODEL_ID', required=False,", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "help='the project ID used to register this device') parser.add_argument('--device-config', type=str, metavar='DEVICE_CONFIG_FILE', default=os.path.join( os.path.expanduser('~/.config'),", "def setup_speech(assistant): global speak def speak(x): assistant.send_text_query(\"repeat after me \" + x) def", "registered. This means you will not be able to use Device Actions or", "== EventType.ON_RECOGNIZING_SPEECH_FINISHED: log({'type': 'speech', 'text': event.args['text']}) elif event.type == EventType.ON_RENDER_RESPONSE: log({'type': 'google response',", "command == \"com.example.commands.SqueezeBoxSpotifySearch\": ans = str(squeeze_controller.spotify_search_and_play(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif", "# you may not use this file except in compliance with the License.", "squeeze_controller = squeezebox.AssistantSqueezeBoxController(creds['squeezebox_server']['ip'], creds['squeezebox_server']['port'], main_squeezebox=creds['nearest_squeezebox']) def setup_speech(assistant): global speak def speak(x): assistant.send_text_query(\"repeat after", "and args.device_model_id != device_model_id) device_model_id = args.device_model_id or device_model_id with Assistant(credentials, device_model_id) as", "log({'type': 'squeezebox response', 'message': e}) except Exception as e: e = str(e) speak(e)", "help='path to store and read OAuth2 credentials') parser.add_argument('-v', '--version', action='version', version='%(prog)s ' +", "log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxSpotifySearch\": ans = str(squeeze_controller.spotify_search_and_play(params)) speak(ans)", "command == \"com.example.commands.SqueezeBoxRadio4\": squeeze_controller.play_radio4(params) except squeezebox.UserException as e: e = str(e) speak(e) log({'type':", "used to register this device') parser.add_argument('--device-config', type=str, metavar='DEVICE_CONFIG_FILE', default=os.path.join( os.path.expanduser('~/.config'), 'googlesamples-assistant', 'device_config_library.json' ),", "agreed to in writing, software # distributed under the License is distributed on", "), help='path to store and read OAuth2 credentials') parser.add_argument('-v', '--version', action='version', version='%(prog)s '", "if event.type == EventType.ON_DEVICE_ACTION: for command, params in event.actions: log({'type': 'device action', 'command':", "else: print(WARNING_NOT_REGISTERED) setup_controllers(args.home_control_credentials) setup_speech(assistant) for event in events: process_event(event) if __name__ == '__main__':", "speak(x): assistant.send_text_query(\"repeat after me \" + x) def main(): parser = argparse.ArgumentParser( formatter_class=argparse.RawTextHelpFormatter)", "current event to process. 
\"\"\" if event.type == EventType.ON_DEVICE_ACTION: for command, params in", "(the \"License\"); # you may not use this file except in compliance with", "log({'type': 'exception', 'message': e}) elif event.type == EventType.ON_RECOGNIZING_SPEECH_FINISHED: log({'type': 'speech', 'text': event.args['text']}) elif", "google.oauth2.credentials.Credentials(token=None, **json.load(f)) device_model_id = None last_device_id = None try: with open(args.device_config) as f:", "configuration') parser.add_argument('--credentials', type=existing_file, metavar='OAUTH2_CREDENTIALS_FILE', default=os.path.join( os.path.expanduser('~/.config'), 'google-oauthlib-tool', 'credentials.json' ), help='path to store and", "License. from __future__ import print_function import argparse import json import os.path import pathlib2", "EventType.ON_DEVICE_ACTION: for command, params in event.actions: log({'type': 'device action', 'command': command, 'params': params})", "\"type\": \"starting up\", \"device_model_id\": device_model_id, \"device_id\": device_id }) # Re-register if \"device_id\" is", "# Unless required by applicable law or agreed to in writing, software #", "== \"com.example.commands.SqueezeBoxCommand\": squeeze_controller.simple_command(params) elif command == \"com.example.commands.SqueezeBoxQuery\": ans = str(squeeze_controller.simple_query(params)) speak(ans) log({'type': 'squeezebox", "device model ID registered with Google') parser.add_argument('--project-id', '--project_id', type=str, metavar='PROJECT_ID', required=False, help='the project", "by applicable law or agreed to in writing, software # distributed under the", "# limitations under the License. from __future__ import print_function import argparse import json", "), help='path to store and read device configuration') parser.add_argument('--credentials', type=existing_file, metavar='OAUTH2_CREDENTIALS_FILE', default=os.path.join( os.path.expanduser('~/.config'),", "pathlib import google.oauth2.credentials from google.assistant.library import Assistant from google.assistant.library.event import EventType from google.assistant.library.file_helpers", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxVolume\": squeeze_controller.set_volume(params) elif command == \"com.example.commands.SqueezeBoxSleep\": squeeze_controller.sleep_in(params)", "and read OAuth2 credentials') parser.add_argument('-v', '--version', action='version', version='%(prog)s ' + Assistant.__version_str__()) parser.add_argument('--logfile', type=str,", "f: f.write(message) def flush(self): pass def log(x): now = datetime.datetime.now().strftime('%F_%X') x['time'] = now", "instructions at: https://developers.google.com/assistant/sdk/guides/library/python/embed/register-device \"\"\" class Logger(object): def __init__(self, filename): self.filename = filename def", "Inc. 
# # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "with open(self.filename, \"a\") as f: f.write(message) def flush(self): pass def log(x): now =", "required=False, help='file to write the log to') parser.add_argument('--home_control_credentials', type=str, required=True, help='path of home", "\"com.example.commands.SqueezeBoxSearch\": ans = str(squeeze_controller.search_and_play(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command ==", "argparse.ArgumentParser( formatter_class=argparse.RawTextHelpFormatter) parser.add_argument('--device-model-id', '--device_model_id', type=str, metavar='DEVICE_MODEL_ID', required=False, help='the device model ID registered with", "device_model_id = None last_device_id = None try: with open(args.device_config) as f: device_config =", "command == \"com.example.commands.SqueezeBoxSearch\": ans = str(squeeze_controller.search_and_play(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif", "file except in compliance with the License. # You may obtain a copy", "\"com.example.commands.SqueezeBoxSleep\": squeeze_controller.sleep_in(params) elif command == \"com.example.commands.SqueezeBoxSendMusic\": squeeze_controller.send_music(params) elif command == \"com.example.commands.SqueezeBoxSync\": squeeze_controller.sync_player(params) elif", "'message': ans}) elif command == \"com.example.commands.SqueezeBoxPlayNext\": ans = str(squeeze_controller.search_and_play_next(params)) speak(ans) log({'type': 'squeezebox response',", "squeezebox.AssistantSqueezeBoxController(creds['squeezebox_server']['ip'], creds['squeezebox_server']['port'], main_squeezebox=creds['nearest_squeezebox']) def setup_speech(assistant): global speak def speak(x): assistant.send_text_query(\"repeat after me \"", "Assistant from google.assistant.library.event import EventType from google.assistant.library.file_helpers import existing_file from google.assistant.library.device_helpers import register_device", "metavar='DEVICE_MODEL_ID', required=False, help='the device model ID registered with Google') parser.add_argument('--project-id', '--project_id', type=str, metavar='PROJECT_ID',", "write the log to') parser.add_argument('--home_control_credentials', type=str, required=True, help='path of home control credentials') args", "= device_config['model_id'] last_device_id = device_config.get('last_device_id', None) except FileNotFoundError: pass if not args.device_model_id and", "python # Copyright (C) 2017 Google Inc. 
# # Licensed under the Apache", "License for the specific language governing permissions and # limitations under the License.", "\" + x) def main(): parser = argparse.ArgumentParser( formatter_class=argparse.RawTextHelpFormatter) parser.add_argument('--device-model-id', '--device_model_id', type=str, metavar='DEVICE_MODEL_ID',", "Re-register if \"device_id\" is different from the last \"device_id\": if should_register or (device_id", "register_device(args.project_id, credentials, device_model_id, device_id) pathlib.Path(os.path.dirname(args.device_config)).mkdir( exist_ok=True) with open(args.device_config, 'w') as f: json.dump({ 'last_device_id':", "'quiet'}) elif event.type in [EventType.ON_END_OF_UTTERANCE, EventType.ON_RESPONDING_FINISHED]: squeeze_controller.return_volume() log({'type': 'return volume'}) def setup_controllers(credentials_path): global", "to in writing, software # distributed under the License is distributed on an", "'device action', 'command': command, 'params': params}) try: if command == \"com.example.commands.SqueezeBoxCommand\": squeeze_controller.simple_command(params) elif", "creds = json.loads(f.read()) squeeze_controller = squeezebox.AssistantSqueezeBoxController(creds['squeezebox_server']['ip'], creds['squeezebox_server']['port'], main_squeezebox=creds['nearest_squeezebox']) def setup_speech(assistant): global speak def", "implied. # See the License for the specific language governing permissions and #", "\"License\"); # you may not use this file except in compliance with the", "store and read OAuth2 credentials') parser.add_argument('-v', '--version', action='version', version='%(prog)s ' + Assistant.__version_str__()) parser.add_argument('--logfile',", "elif command == \"com.example.commands.SqueezeBoxRadio4\": squeeze_controller.play_radio4(params) except squeezebox.UserException as e: e = str(e) speak(e)", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "from what we previously registered with. 
should_register = ( args.device_model_id and args.device_model_id !=", "means you will not be able to use Device Actions or see your", "def setup_controllers(credentials_path): global squeeze_controller with open(credentials_path, \"r\") as f: creds = json.loads(f.read()) squeeze_controller", "by the user and it differs # from what we previously registered with.", "should_register or (device_id != last_device_id): if args.project_id: register_device(args.project_id, credentials, device_model_id, device_id) pathlib.Path(os.path.dirname(args.device_config)).mkdir( exist_ok=True)", "order to register this device follow instructions at: https://developers.google.com/assistant/sdk/guides/library/python/embed/register-device \"\"\" class Logger(object): def", "= argparse.ArgumentParser( formatter_class=argparse.RawTextHelpFormatter) parser.add_argument('--device-model-id', '--device_model_id', type=str, metavar='DEVICE_MODEL_ID', required=False, help='the device model ID registered", "\"device_id\": if should_register or (device_id != last_device_id): if args.project_id: register_device(args.project_id, credentials, device_model_id, device_id)", "response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxSearch\": ans = str(squeeze_controller.search_and_play(params)) speak(ans) log({'type': 'squeezebox", "of home control credentials') args = parser.parse_args() if args.logfile: sys.stdout = sys.stderr =", "[EventType.ON_CONVERSATION_TURN_STARTED, EventType.ON_RESPONDING_STARTED]: squeeze_controller.quiet() log({'type': 'quiet'}) elif event.type in [EventType.ON_END_OF_UTTERANCE, EventType.ON_RESPONDING_FINISHED]: squeeze_controller.return_volume() log({'type': 'return", "f: device_config = json.load(f) device_model_id = device_config['model_id'] last_device_id = device_config.get('last_device_id', None) except FileNotFoundError:", "not args.device_model_id and not device_model_id: raise Exception('Missing --device-model-id option') # Re-register if \"device_model_id\"", "log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxPlayEnd\": ans = str(squeeze_controller.search_and_play_end(params)) speak(ans)", "or implied. # See the License for the specific language governing permissions and", "squeezebox import sys import datetime try: FileNotFoundError except NameError: FileNotFoundError = IOError WARNING_NOT_REGISTERED", "= str(squeeze_controller.search_and_play_next(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxPlayEnd\": ans", "at: https://developers.google.com/assistant/sdk/guides/library/python/embed/register-device \"\"\" class Logger(object): def __init__(self, filename): self.filename = filename def write(self,", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "last \"device_id\": if should_register or (device_id != last_device_id): if args.project_id: register_device(args.project_id, credentials, device_model_id,", "OR CONDITIONS OF ANY KIND, either express or implied. # See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "Device Actions or see your device in Assistant Settings. 
In order to register", "is given by the user and it differs # from what we previously", "to') parser.add_argument('--home_control_credentials', type=str, required=True, help='path of home control credentials') args = parser.parse_args() if", "Assistant(credentials, device_model_id) as assistant: events = assistant.start() device_id = assistant.device_id log({ \"type\": \"starting", "= None last_device_id = None try: with open(args.device_config) as f: device_config = json.load(f)", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "f: credentials = google.oauth2.credentials.Credentials(token=None, **json.load(f)) device_model_id = None last_device_id = None try: with", "Exception as e: e = str(e) speak(e) log({'type': 'exception', 'message': e}) elif event.type", "in writing, software # distributed under the License is distributed on an \"AS", "Exception('Missing --device-model-id option') # Re-register if \"device_model_id\" is given by the user and", "squeeze_controller.simple_command(params) elif command == \"com.example.commands.SqueezeBoxQuery\": ans = str(squeeze_controller.simple_query(params)) speak(ans) log({'type': 'squeezebox response', 'message':", "= filename def write(self, message): with open(self.filename, \"a\") as f: f.write(message) def flush(self):", "log({'type': 'quiet'}) elif event.type in [EventType.ON_END_OF_UTTERANCE, EventType.ON_RESPONDING_FINISHED]: squeeze_controller.return_volume() log({'type': 'return volume'}) def setup_controllers(credentials_path):", "squeezebox.UserException as e: e = str(e) speak(e) log({'type': 'squeezebox response', 'message': e}) except", "' + Assistant.__version_str__()) parser.add_argument('--logfile', type=str, required=False, help='file to write the log to') parser.add_argument('--home_control_credentials',", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "type=str, metavar='PROJECT_ID', required=False, help='the project ID used to register this device') parser.add_argument('--device-config', type=str,", "= str(e) speak(e) log({'type': 'squeezebox response', 'message': e}) except Exception as e: e", "response', 'text': event.args['text']}) elif event.type == EventType.ON_CONVERSATION_TURN_STARTED: log({'type': 'listening'}) if event.type in [EventType.ON_CONVERSATION_TURN_STARTED,", "\"com.example.commands.SqueezeBoxSpotifySearch\": ans = str(squeeze_controller.spotify_search_and_play(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command ==", "as pathlib import google.oauth2.credentials from google.assistant.library import Assistant from google.assistant.library.event import EventType from", "= device_config.get('last_device_id', None) except FileNotFoundError: pass if not args.device_model_id and not device_model_id: raise", "as e: e = str(e) speak(e) log({'type': 'exception', 'message': e}) elif event.type ==", "The current event to process. 
\"\"\" if event.type == EventType.ON_DEVICE_ACTION: for command, params", "register this device follow instructions at: https://developers.google.com/assistant/sdk/guides/library/python/embed/register-device \"\"\" class Logger(object): def __init__(self, filename):", "'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxPlayNext\": ans = str(squeeze_controller.search_and_play_next(params)) speak(ans) log({'type':", "import sys import datetime try: FileNotFoundError except NameError: FileNotFoundError = IOError WARNING_NOT_REGISTERED =", "open(args.device_config) as f: device_config = json.load(f) device_model_id = device_config['model_id'] last_device_id = device_config.get('last_device_id', None)", "given by the user and it differs # from what we previously registered", "e = str(e) speak(e) log({'type': 'squeezebox response', 'message': e}) except Exception as e:", "if args.logfile: sys.stdout = sys.stderr = Logger(args.logfile) with open(args.credentials, 'r') as f: credentials", "parser.add_argument('--device-config', type=str, metavar='DEVICE_CONFIG_FILE', default=os.path.join( os.path.expanduser('~/.config'), 'googlesamples-assistant', 'device_config_library.json' ), help='path to store and read", "device_model_id, device_id) pathlib.Path(os.path.dirname(args.device_config)).mkdir( exist_ok=True) with open(args.device_config, 'w') as f: json.dump({ 'last_device_id': device_id, 'model_id':", "FileNotFoundError except NameError: FileNotFoundError = IOError WARNING_NOT_REGISTERED = \"\"\" This device is not", "log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxPlayNext\": ans = str(squeeze_controller.search_and_play_next(params)) speak(ans)", "help='file to write the log to') parser.add_argument('--home_control_credentials', type=str, required=True, help='path of home control", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "e: e = str(e) speak(e) log({'type': 'squeezebox response', 'message': e}) except Exception as", "str(e) speak(e) log({'type': 'exception', 'message': e}) elif event.type == EventType.ON_RECOGNIZING_SPEECH_FINISHED: log({'type': 'speech', 'text':", "from the last \"device_id\": if should_register or (device_id != last_device_id): if args.project_id: register_device(args.project_id,", "you may not use this file except in compliance with the License. #", "device_model_id) as assistant: events = assistant.start() device_id = assistant.device_id log({ \"type\": \"starting up\",", "args.device_model_id and not device_model_id: raise Exception('Missing --device-model-id option') # Re-register if \"device_model_id\" is", "\"device_model_id\": device_model_id, \"device_id\": device_id }) # Re-register if \"device_id\" is different from the", "in Assistant Settings. 
In order to register this device follow instructions at: https://developers.google.com/assistant/sdk/guides/library/python/embed/register-device", "the log to') parser.add_argument('--home_control_credentials', type=str, required=True, help='path of home control credentials') args =", "formatter_class=argparse.RawTextHelpFormatter) parser.add_argument('--device-model-id', '--device_model_id', type=str, metavar='DEVICE_MODEL_ID', required=False, help='the device model ID registered with Google')", "= ( args.device_model_id and args.device_model_id != device_model_id) device_model_id = args.device_model_id or device_model_id with", "parser.add_argument('-v', '--version', action='version', version='%(prog)s ' + Assistant.__version_str__()) parser.add_argument('--logfile', type=str, required=False, help='file to write", "parser.parse_args() if args.logfile: sys.stdout = sys.stderr = Logger(args.logfile) with open(args.credentials, 'r') as f:", "setup_controllers(credentials_path): global squeeze_controller with open(credentials_path, \"r\") as f: creds = json.loads(f.read()) squeeze_controller =", "'--device_model_id', type=str, metavar='DEVICE_MODEL_ID', required=False, help='the device model ID registered with Google') parser.add_argument('--project-id', '--project_id',", "google.assistant.library.event import EventType from google.assistant.library.file_helpers import existing_file from google.assistant.library.device_helpers import register_device import assistant_squeezebox_controller", "\"com.example.commands.SqueezeBoxRadio4\": squeeze_controller.play_radio4(params) except squeezebox.UserException as e: e = str(e) speak(e) log({'type': 'squeezebox response',", "use this file except in compliance with the License. # You may obtain", "== \"com.example.commands.SqueezeBoxSendMusic\": squeeze_controller.send_music(params) elif command == \"com.example.commands.SqueezeBoxSync\": squeeze_controller.sync_player(params) elif command == \"com.example.commands.SqueezeBoxRadio4\": squeeze_controller.play_radio4(params)", "type=str, metavar='DEVICE_MODEL_ID', required=False, help='the device model ID registered with Google') parser.add_argument('--project-id', '--project_id', type=str,", "str(e) speak(e) log({'type': 'squeezebox response', 'message': e}) except Exception as e: e =", "exist_ok=True) with open(args.device_config, 'w') as f: json.dump({ 'last_device_id': device_id, 'model_id': device_model_id, }, f)", "with open(args.credentials, 'r') as f: credentials = google.oauth2.credentials.Credentials(token=None, **json.load(f)) device_model_id = None last_device_id", "ans}) elif command == \"com.example.commands.SqueezeBoxPlayNext\": ans = str(squeeze_controller.search_and_play_next(params)) speak(ans) log({'type': 'squeezebox response', 'message':", "default=os.path.join( os.path.expanduser('~/.config'), 'googlesamples-assistant', 'device_config_library.json' ), help='path to store and read device configuration') parser.add_argument('--credentials',", "os.path import pathlib2 as pathlib import google.oauth2.credentials from google.assistant.library import Assistant from google.assistant.library.event", "def log(x): now = datetime.datetime.now().strftime('%F_%X') x['time'] = now print(x) def process_event(event): \"\"\" Args:", "device_model_id) device_model_id = args.device_model_id or device_model_id with Assistant(credentials, device_model_id) as assistant: events =", "'message': ans}) elif command == \"com.example.commands.SqueezeBoxSpotifySearch\": ans = 
str(squeeze_controller.spotify_search_and_play(params)) speak(ans) log({'type': 'squeezebox response',", "existing_file from google.assistant.library.device_helpers import register_device import assistant_squeezebox_controller as squeezebox import sys import datetime", "open(credentials_path, \"r\") as f: creds = json.loads(f.read()) squeeze_controller = squeezebox.AssistantSqueezeBoxController(creds['squeezebox_server']['ip'], creds['squeezebox_server']['port'], main_squeezebox=creds['nearest_squeezebox']) def", "speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxPlayEnd\": ans = str(squeeze_controller.search_and_play_end(params))", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "= parser.parse_args() if args.logfile: sys.stdout = sys.stderr = Logger(args.logfile) with open(args.credentials, 'r') as", "default=os.path.join( os.path.expanduser('~/.config'), 'google-oauthlib-tool', 'credentials.json' ), help='path to store and read OAuth2 credentials') parser.add_argument('-v',", "e}) elif event.type == EventType.ON_RECOGNIZING_SPEECH_FINISHED: log({'type': 'speech', 'text': event.args['text']}) elif event.type == EventType.ON_RENDER_RESPONSE:", "open(args.credentials, 'r') as f: credentials = google.oauth2.credentials.Credentials(token=None, **json.load(f)) device_model_id = None last_device_id =", "from __future__ import print_function import argparse import json import os.path import pathlib2 as", "to register this device') parser.add_argument('--device-config', type=str, metavar='DEVICE_CONFIG_FILE', default=os.path.join( os.path.expanduser('~/.config'), 'googlesamples-assistant', 'device_config_library.json' ), help='path", "Google Inc. # # Licensed under the Apache License, Version 2.0 (the \"License\");", "def flush(self): pass def log(x): now = datetime.datetime.now().strftime('%F_%X') x['time'] = now print(x) def", "e}) except Exception as e: e = str(e) speak(e) log({'type': 'exception', 'message': e})", "--device-model-id option') # Re-register if \"device_model_id\" is given by the user and it", "and # limitations under the License. from __future__ import print_function import argparse import", "will not be able to use Device Actions or see your device in", "ans}) elif command == \"com.example.commands.SqueezeBoxSearch\": ans = str(squeeze_controller.search_and_play(params)) speak(ans) log({'type': 'squeezebox response', 'message':", "**json.load(f)) device_model_id = None last_device_id = None try: with open(args.device_config) as f: device_config", "2.0 (the \"License\"); # you may not use this file except in compliance", "'speech', 'text': event.args['text']}) elif event.type == EventType.ON_RENDER_RESPONSE: log({'type': 'google response', 'text': event.args['text']}) elif", "IOError WARNING_NOT_REGISTERED = \"\"\" This device is not registered. 
This means you will", "device_model_id, \"device_id\": device_id }) # Re-register if \"device_id\" is different from the last", "import EventType from google.assistant.library.file_helpers import existing_file from google.assistant.library.device_helpers import register_device import assistant_squeezebox_controller as", "this device') parser.add_argument('--device-config', type=str, metavar='DEVICE_CONFIG_FILE', default=os.path.join( os.path.expanduser('~/.config'), 'googlesamples-assistant', 'device_config_library.json' ), help='path to store", "log to') parser.add_argument('--home_control_credentials', type=str, required=True, help='path of home control credentials') args = parser.parse_args()", "google.oauth2.credentials from google.assistant.library import Assistant from google.assistant.library.event import EventType from google.assistant.library.file_helpers import existing_file", "elif command == \"com.example.commands.SqueezeBoxSync\": squeeze_controller.sync_player(params) elif command == \"com.example.commands.SqueezeBoxRadio4\": squeeze_controller.play_radio4(params) except squeezebox.UserException as", "if \"device_id\" is different from the last \"device_id\": if should_register or (device_id !=", "== EventType.ON_RENDER_RESPONSE: log({'type': 'google response', 'text': event.args['text']}) elif event.type == EventType.ON_CONVERSATION_TURN_STARTED: log({'type': 'listening'})", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "be able to use Device Actions or see your device in Assistant Settings.", "import argparse import json import os.path import pathlib2 as pathlib import google.oauth2.credentials from", "what we previously registered with. should_register = ( args.device_model_id and args.device_model_id != device_model_id)", "def speak(x): assistant.send_text_query(\"repeat after me \" + x) def main(): parser = argparse.ArgumentParser(", "from google.assistant.library.device_helpers import register_device import assistant_squeezebox_controller as squeezebox import sys import datetime try:", "# # Unless required by applicable law or agreed to in writing, software", "previously registered with. should_register = ( args.device_model_id and args.device_model_id != device_model_id) device_model_id =", "express or implied. # See the License for the specific language governing permissions", "from google.assistant.library.event import EventType from google.assistant.library.file_helpers import existing_file from google.assistant.library.device_helpers import register_device import", "(C) 2017 Google Inc. # # Licensed under the Apache License, Version 2.0", "creds['squeezebox_server']['port'], main_squeezebox=creds['nearest_squeezebox']) def setup_speech(assistant): global speak def speak(x): assistant.send_text_query(\"repeat after me \" +", "'w') as f: json.dump({ 'last_device_id': device_id, 'model_id': device_model_id, }, f) else: print(WARNING_NOT_REGISTERED) setup_controllers(args.home_control_credentials)", "<reponame>jackoson/squeezebox-google-assistant #!/usr/bin/env python # Copyright (C) 2017 Google Inc. 
# # Licensed under", "ans = str(squeeze_controller.search_and_play_next(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxPlayEnd\":", "== \"com.example.commands.SqueezeBoxQuery\": ans = str(squeeze_controller.simple_query(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command", "model ID registered with Google') parser.add_argument('--project-id', '--project_id', type=str, metavar='PROJECT_ID', required=False, help='the project ID", "print(WARNING_NOT_REGISTERED) setup_controllers(args.home_control_credentials) setup_speech(assistant) for event in events: process_event(event) if __name__ == '__main__': main()", "help='path of home control credentials') args = parser.parse_args() if args.logfile: sys.stdout = sys.stderr", "ID registered with Google') parser.add_argument('--project-id', '--project_id', type=str, metavar='PROJECT_ID', required=False, help='the project ID used", "either express or implied. # See the License for the specific language governing", "speak(e) log({'type': 'squeezebox response', 'message': e}) except Exception as e: e = str(e)", "= datetime.datetime.now().strftime('%F_%X') x['time'] = now print(x) def process_event(event): \"\"\" Args: event(event.Event): The current", "= args.device_model_id or device_model_id with Assistant(credentials, device_model_id) as assistant: events = assistant.start() device_id", "raise Exception('Missing --device-model-id option') # Re-register if \"device_model_id\" is given by the user", "pathlib2 as pathlib import google.oauth2.credentials from google.assistant.library import Assistant from google.assistant.library.event import EventType", "elif command == \"com.example.commands.SqueezeBoxSearch\": ans = str(squeeze_controller.search_and_play(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans})", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "(device_id != last_device_id): if args.project_id: register_device(args.project_id, credentials, device_model_id, device_id) pathlib.Path(os.path.dirname(args.device_config)).mkdir( exist_ok=True) with open(args.device_config,", "'credentials.json' ), help='path to store and read OAuth2 credentials') parser.add_argument('-v', '--version', action='version', version='%(prog)s", "log({'type': 'listening'}) if event.type in [EventType.ON_CONVERSATION_TURN_STARTED, EventType.ON_RESPONDING_STARTED]: squeeze_controller.quiet() log({'type': 'quiet'}) elif event.type in", "version='%(prog)s ' + Assistant.__version_str__()) parser.add_argument('--logfile', type=str, required=False, help='file to write the log to')", "[EventType.ON_END_OF_UTTERANCE, EventType.ON_RESPONDING_FINISHED]: squeeze_controller.return_volume() log({'type': 'return volume'}) def setup_controllers(credentials_path): global squeeze_controller with open(credentials_path, \"r\")", "the specific language governing permissions and # limitations under the License. from __future__", "# from what we previously registered with. 
should_register = ( args.device_model_id and args.device_model_id", "last_device_id = None try: with open(args.device_config) as f: device_config = json.load(f) device_model_id =", "elif event.type in [EventType.ON_END_OF_UTTERANCE, EventType.ON_RESPONDING_FINISHED]: squeeze_controller.return_volume() log({'type': 'return volume'}) def setup_controllers(credentials_path): global squeeze_controller", "\"\"\" This device is not registered. This means you will not be able", "command == \"com.example.commands.SqueezeBoxVolume\": squeeze_controller.set_volume(params) elif command == \"com.example.commands.SqueezeBoxSleep\": squeeze_controller.sleep_in(params) elif command == \"com.example.commands.SqueezeBoxSendMusic\":", "log({ \"type\": \"starting up\", \"device_model_id\": device_model_id, \"device_id\": device_id }) # Re-register if \"device_id\"", "command == \"com.example.commands.SqueezeBoxSleep\": squeeze_controller.sleep_in(params) elif command == \"com.example.commands.SqueezeBoxSendMusic\": squeeze_controller.send_music(params) elif command == \"com.example.commands.SqueezeBoxSync\":", "we previously registered with. should_register = ( args.device_model_id and args.device_model_id != device_model_id) device_model_id", "\"com.example.commands.SqueezeBoxPlayEnd\": ans = str(squeeze_controller.search_and_play_end(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command ==", "the License. # You may obtain a copy of the License at #", "see your device in Assistant Settings. In order to register this device follow", "parser.add_argument('--credentials', type=existing_file, metavar='OAUTH2_CREDENTIALS_FILE', default=os.path.join( os.path.expanduser('~/.config'), 'google-oauthlib-tool', 'credentials.json' ), help='path to store and read", "params}) try: if command == \"com.example.commands.SqueezeBoxCommand\": squeeze_controller.simple_command(params) elif command == \"com.example.commands.SqueezeBoxQuery\": ans =", "sys.stdout = sys.stderr = Logger(args.logfile) with open(args.credentials, 'r') as f: credentials = google.oauth2.credentials.Credentials(token=None,", "Settings. 
In order to register this device follow instructions at: https://developers.google.com/assistant/sdk/guides/library/python/embed/register-device \"\"\" class", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "speak(e) log({'type': 'exception', 'message': e}) elif event.type == EventType.ON_RECOGNIZING_SPEECH_FINISHED: log({'type': 'speech', 'text': event.args['text']})", "speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxVolume\": squeeze_controller.set_volume(params) elif command", "'message': ans}) elif command == \"com.example.commands.SqueezeBoxPlayEnd\": ans = str(squeeze_controller.search_and_play_end(params)) speak(ans) log({'type': 'squeezebox response',", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "OAuth2 credentials') parser.add_argument('-v', '--version', action='version', version='%(prog)s ' + Assistant.__version_str__()) parser.add_argument('--logfile', type=str, required=False, help='file", "'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxSpotifySearch\": ans = str(squeeze_controller.spotify_search_and_play(params)) speak(ans) log({'type':", "def write(self, message): with open(self.filename, \"a\") as f: f.write(message) def flush(self): pass def", "with open(args.device_config) as f: device_config = json.load(f) device_model_id = device_config['model_id'] last_device_id = device_config.get('last_device_id',", "# Re-register if \"device_model_id\" is given by the user and it differs #", "\"com.example.commands.SqueezeBoxVolume\": squeeze_controller.set_volume(params) elif command == \"com.example.commands.SqueezeBoxSleep\": squeeze_controller.sleep_in(params) elif command == \"com.example.commands.SqueezeBoxSendMusic\": squeeze_controller.send_music(params) elif", "import datetime try: FileNotFoundError except NameError: FileNotFoundError = IOError WARNING_NOT_REGISTERED = \"\"\" This", "differs # from what we previously registered with. should_register = ( args.device_model_id and", "last_device_id): if args.project_id: register_device(args.project_id, credentials, device_model_id, device_id) pathlib.Path(os.path.dirname(args.device_config)).mkdir( exist_ok=True) with open(args.device_config, 'w') as", "device configuration') parser.add_argument('--credentials', type=existing_file, metavar='OAUTH2_CREDENTIALS_FILE', default=os.path.join( os.path.expanduser('~/.config'), 'google-oauthlib-tool', 'credentials.json' ), help='path to store", "with open(credentials_path, \"r\") as f: creds = json.loads(f.read()) squeeze_controller = squeezebox.AssistantSqueezeBoxController(creds['squeezebox_server']['ip'], creds['squeezebox_server']['port'], main_squeezebox=creds['nearest_squeezebox'])", "= IOError WARNING_NOT_REGISTERED = \"\"\" This device is not registered. This means you", "elif event.type == EventType.ON_CONVERSATION_TURN_STARTED: log({'type': 'listening'}) if event.type in [EventType.ON_CONVERSATION_TURN_STARTED, EventType.ON_RESPONDING_STARTED]: squeeze_controller.quiet() log({'type':", "command == \"com.example.commands.SqueezeBoxPlayNext\": ans = str(squeeze_controller.search_and_play_next(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif", "elif command == \"com.example.commands.SqueezeBoxQuery\": ans = str(squeeze_controller.simple_query(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans})", "it differs # from what we previously registered with. 
should_register = ( args.device_model_id", "register this device') parser.add_argument('--device-config', type=str, metavar='DEVICE_CONFIG_FILE', default=os.path.join( os.path.expanduser('~/.config'), 'googlesamples-assistant', 'device_config_library.json' ), help='path to", "not registered. This means you will not be able to use Device Actions", "f: json.dump({ 'last_device_id': device_id, 'model_id': device_model_id, }, f) else: print(WARNING_NOT_REGISTERED) setup_controllers(args.home_control_credentials) setup_speech(assistant) for", "= \"\"\" This device is not registered. This means you will not be", "with the License. # You may obtain a copy of the License at", "args.logfile: sys.stdout = sys.stderr = Logger(args.logfile) with open(args.credentials, 'r') as f: credentials =", "to register this device follow instructions at: https://developers.google.com/assistant/sdk/guides/library/python/embed/register-device \"\"\" class Logger(object): def __init__(self,", "== \"com.example.commands.SqueezeBoxVolume\": squeeze_controller.set_volume(params) elif command == \"com.example.commands.SqueezeBoxSleep\": squeeze_controller.sleep_in(params) elif command == \"com.example.commands.SqueezeBoxSendMusic\": squeeze_controller.send_music(params)", "under the License. from __future__ import print_function import argparse import json import os.path", "parser = argparse.ArgumentParser( formatter_class=argparse.RawTextHelpFormatter) parser.add_argument('--device-model-id', '--device_model_id', type=str, metavar='DEVICE_MODEL_ID', required=False, help='the device model ID", "response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxPlayNext\": ans = str(squeeze_controller.search_and_play_next(params)) speak(ans) log({'type': 'squeezebox", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "google.assistant.library.file_helpers import existing_file from google.assistant.library.device_helpers import register_device import assistant_squeezebox_controller as squeezebox import sys", "ans = str(squeeze_controller.spotify_search_and_play(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxVolume\":", "Args: event(event.Event): The current event to process. \"\"\" if event.type == EventType.ON_DEVICE_ACTION: for", "f: creds = json.loads(f.read()) squeeze_controller = squeezebox.AssistantSqueezeBoxController(creds['squeezebox_server']['ip'], creds['squeezebox_server']['port'], main_squeezebox=creds['nearest_squeezebox']) def setup_speech(assistant): global speak", "( args.device_model_id and args.device_model_id != device_model_id) device_model_id = args.device_model_id or device_model_id with Assistant(credentials,", "\"com.example.commands.SqueezeBoxCommand\": squeeze_controller.simple_command(params) elif command == \"com.example.commands.SqueezeBoxQuery\": ans = str(squeeze_controller.simple_query(params)) speak(ans) log({'type': 'squeezebox response',", "f) else: print(WARNING_NOT_REGISTERED) setup_controllers(args.home_control_credentials) setup_speech(assistant) for event in events: process_event(event) if __name__ ==", "elif event.type == EventType.ON_RENDER_RESPONSE: log({'type': 'google response', 'text': event.args['text']}) elif event.type == EventType.ON_CONVERSATION_TURN_STARTED:", "2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the", "This device is not registered. 
This means you will not be able to", "this device follow instructions at: https://developers.google.com/assistant/sdk/guides/library/python/embed/register-device \"\"\" class Logger(object): def __init__(self, filename): self.filename", "ID used to register this device') parser.add_argument('--device-config', type=str, metavar='DEVICE_CONFIG_FILE', default=os.path.join( os.path.expanduser('~/.config'), 'googlesamples-assistant', 'device_config_library.json'", "device_model_id, }, f) else: print(WARNING_NOT_REGISTERED) setup_controllers(args.home_control_credentials) setup_speech(assistant) for event in events: process_event(event) if", "'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxVolume\": squeeze_controller.set_volume(params) elif command == \"com.example.commands.SqueezeBoxSleep\":", "#!/usr/bin/env python # Copyright (C) 2017 Google Inc. # # Licensed under the", "register_device import assistant_squeezebox_controller as squeezebox import sys import datetime try: FileNotFoundError except NameError:", "squeeze_controller.sleep_in(params) elif command == \"com.example.commands.SqueezeBoxSendMusic\": squeeze_controller.send_music(params) elif command == \"com.example.commands.SqueezeBoxSync\": squeeze_controller.sync_player(params) elif command", "law or agreed to in writing, software # distributed under the License is", "registered with Google') parser.add_argument('--project-id', '--project_id', type=str, metavar='PROJECT_ID', required=False, help='the project ID used to", "the License for the specific language governing permissions and # limitations under the", "or (device_id != last_device_id): if args.project_id: register_device(args.project_id, credentials, device_model_id, device_id) pathlib.Path(os.path.dirname(args.device_config)).mkdir( exist_ok=True) with", "if should_register or (device_id != last_device_id): if args.project_id: register_device(args.project_id, credentials, device_model_id, device_id) pathlib.Path(os.path.dirname(args.device_config)).mkdir(", "Logger(object): def __init__(self, filename): self.filename = filename def write(self, message): with open(self.filename, \"a\")", "event.actions: log({'type': 'device action', 'command': command, 'params': params}) try: if command == \"com.example.commands.SqueezeBoxCommand\":", "with Google') parser.add_argument('--project-id', '--project_id', type=str, metavar='PROJECT_ID', required=False, help='the project ID used to register", "elif command == \"com.example.commands.SqueezeBoxSleep\": squeeze_controller.sleep_in(params) elif command == \"com.example.commands.SqueezeBoxSendMusic\": squeeze_controller.send_music(params) elif command ==", "device_model_id: raise Exception('Missing --device-model-id option') # Re-register if \"device_model_id\" is given by the", "\"device_model_id\" is given by the user and it differs # from what we", "'r') as f: credentials = google.oauth2.credentials.Credentials(token=None, **json.load(f)) device_model_id = None last_device_id = None", "str(squeeze_controller.search_and_play_end(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxSpotifySearch\": ans =", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "event.type in [EventType.ON_CONVERSATION_TURN_STARTED, EventType.ON_RESPONDING_STARTED]: squeeze_controller.quiet() log({'type': 'quiet'}) elif event.type in [EventType.ON_END_OF_UTTERANCE, EventType.ON_RESPONDING_FINISHED]: 
squeeze_controller.return_volume()", "= sys.stderr = Logger(args.logfile) with open(args.credentials, 'r') as f: credentials = google.oauth2.credentials.Credentials(token=None, **json.load(f))", "open(args.device_config, 'w') as f: json.dump({ 'last_device_id': device_id, 'model_id': device_model_id, }, f) else: print(WARNING_NOT_REGISTERED)", "process. \"\"\" if event.type == EventType.ON_DEVICE_ACTION: for command, params in event.actions: log({'type': 'device", "print_function import argparse import json import os.path import pathlib2 as pathlib import google.oauth2.credentials", "if command == \"com.example.commands.SqueezeBoxCommand\": squeeze_controller.simple_command(params) elif command == \"com.example.commands.SqueezeBoxQuery\": ans = str(squeeze_controller.simple_query(params)) speak(ans)", "FileNotFoundError = IOError WARNING_NOT_REGISTERED = \"\"\" This device is not registered. This means", "EventType.ON_RESPONDING_STARTED]: squeeze_controller.quiet() log({'type': 'quiet'}) elif event.type in [EventType.ON_END_OF_UTTERANCE, EventType.ON_RESPONDING_FINISHED]: squeeze_controller.return_volume() log({'type': 'return volume'})", "params in event.actions: log({'type': 'device action', 'command': command, 'params': params}) try: if command", "def main(): parser = argparse.ArgumentParser( formatter_class=argparse.RawTextHelpFormatter) parser.add_argument('--device-model-id', '--device_model_id', type=str, metavar='DEVICE_MODEL_ID', required=False, help='the device", "as f: json.dump({ 'last_device_id': device_id, 'model_id': device_model_id, }, f) else: print(WARNING_NOT_REGISTERED) setup_controllers(args.home_control_credentials) setup_speech(assistant)", "main(): parser = argparse.ArgumentParser( formatter_class=argparse.RawTextHelpFormatter) parser.add_argument('--device-model-id', '--device_model_id', type=str, metavar='DEVICE_MODEL_ID', required=False, help='the device model", "event(event.Event): The current event to process. \"\"\" if event.type == EventType.ON_DEVICE_ACTION: for command,", "Logger(args.logfile) with open(args.credentials, 'r') as f: credentials = google.oauth2.credentials.Credentials(token=None, **json.load(f)) device_model_id = None", "argparse import json import os.path import pathlib2 as pathlib import google.oauth2.credentials from google.assistant.library", "= now print(x) def process_event(event): \"\"\" Args: event(event.Event): The current event to process.", "Copyright (C) 2017 Google Inc. 
# # Licensed under the Apache License, Version", "== \"com.example.commands.SqueezeBoxPlayNext\": ans = str(squeeze_controller.search_and_play_next(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command", "log({'type': 'device action', 'command': command, 'params': params}) try: if command == \"com.example.commands.SqueezeBoxCommand\": squeeze_controller.simple_command(params)", "args.device_model_id != device_model_id) device_model_id = args.device_model_id or device_model_id with Assistant(credentials, device_model_id) as assistant:", "EventType.ON_RENDER_RESPONSE: log({'type': 'google response', 'text': event.args['text']}) elif event.type == EventType.ON_CONVERSATION_TURN_STARTED: log({'type': 'listening'}) if", "ans = str(squeeze_controller.simple_query(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxSearch\":", "elif event.type == EventType.ON_RECOGNIZING_SPEECH_FINISHED: log({'type': 'speech', 'text': event.args['text']}) elif event.type == EventType.ON_RENDER_RESPONSE: log({'type':", "in compliance with the License. # You may obtain a copy of the", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "import register_device import assistant_squeezebox_controller as squeezebox import sys import datetime try: FileNotFoundError except", "process_event(event): \"\"\" Args: event(event.Event): The current event to process. \"\"\" if event.type ==", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #", "= squeezebox.AssistantSqueezeBoxController(creds['squeezebox_server']['ip'], creds['squeezebox_server']['port'], main_squeezebox=creds['nearest_squeezebox']) def setup_speech(assistant): global speak def speak(x): assistant.send_text_query(\"repeat after me", "def process_event(event): \"\"\" Args: event(event.Event): The current event to process. \"\"\" if event.type", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "not be able to use Device Actions or see your device in Assistant", "'message': e}) except Exception as e: e = str(e) speak(e) log({'type': 'exception', 'message':", "== EventType.ON_CONVERSATION_TURN_STARTED: log({'type': 'listening'}) if event.type in [EventType.ON_CONVERSATION_TURN_STARTED, EventType.ON_RESPONDING_STARTED]: squeeze_controller.quiet() log({'type': 'quiet'}) elif", "assistant.start() device_id = assistant.device_id log({ \"type\": \"starting up\", \"device_model_id\": device_model_id, \"device_id\": device_id })", "read OAuth2 credentials') parser.add_argument('-v', '--version', action='version', version='%(prog)s ' + Assistant.__version_str__()) parser.add_argument('--logfile', type=str, required=False,", "events = assistant.start() device_id = assistant.device_id log({ \"type\": \"starting up\", \"device_model_id\": device_model_id, \"device_id\":", "See the License for the specific language governing permissions and # limitations under", "x['time'] = now print(x) def process_event(event): \"\"\" Args: event(event.Event): The current event to", "to use Device Actions or see your device in Assistant Settings. 
In order", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "!= device_model_id) device_model_id = args.device_model_id or device_model_id with Assistant(credentials, device_model_id) as assistant: events", "In order to register this device follow instructions at: https://developers.google.com/assistant/sdk/guides/library/python/embed/register-device \"\"\" class Logger(object):", "command == \"com.example.commands.SqueezeBoxPlayEnd\": ans = str(squeeze_controller.search_and_play_end(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif", "import os.path import pathlib2 as pathlib import google.oauth2.credentials from google.assistant.library import Assistant from", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "to store and read device configuration') parser.add_argument('--credentials', type=existing_file, metavar='OAUTH2_CREDENTIALS_FILE', default=os.path.join( os.path.expanduser('~/.config'), 'google-oauthlib-tool', 'credentials.json'", "in [EventType.ON_END_OF_UTTERANCE, EventType.ON_RESPONDING_FINISHED]: squeeze_controller.return_volume() log({'type': 'return volume'}) def setup_controllers(credentials_path): global squeeze_controller with open(credentials_path,", "This means you will not be able to use Device Actions or see", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "credentials = google.oauth2.credentials.Credentials(token=None, **json.load(f)) device_model_id = None last_device_id = None try: with open(args.device_config)", "try: FileNotFoundError except NameError: FileNotFoundError = IOError WARNING_NOT_REGISTERED = \"\"\" This device is", "if args.project_id: register_device(args.project_id, credentials, device_model_id, device_id) pathlib.Path(os.path.dirname(args.device_config)).mkdir( exist_ok=True) with open(args.device_config, 'w') as f:", "follow instructions at: https://developers.google.com/assistant/sdk/guides/library/python/embed/register-device \"\"\" class Logger(object): def __init__(self, filename): self.filename = filename", "event.args['text']}) elif event.type == EventType.ON_CONVERSATION_TURN_STARTED: log({'type': 'listening'}) if event.type in [EventType.ON_CONVERSATION_TURN_STARTED, EventType.ON_RESPONDING_STARTED]: squeeze_controller.quiet()", "command, params in event.actions: log({'type': 'device action', 'command': command, 'params': params}) try: if", "except NameError: FileNotFoundError = IOError WARNING_NOT_REGISTERED = \"\"\" This device is not registered.", "google.assistant.library.device_helpers import register_device import assistant_squeezebox_controller as squeezebox import sys import datetime try: FileNotFoundError", "open(self.filename, \"a\") as f: f.write(message) def flush(self): pass def log(x): now = datetime.datetime.now().strftime('%F_%X')", "message): with open(self.filename, \"a\") as f: f.write(message) def flush(self): pass def log(x): now", "the user and it differs # from what we previously registered with. 
should_register", "'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxSearch\": ans = str(squeeze_controller.search_and_play(params)) speak(ans) log({'type':", "\"r\") as f: creds = json.loads(f.read()) squeeze_controller = squeezebox.AssistantSqueezeBoxController(creds['squeezebox_server']['ip'], creds['squeezebox_server']['port'], main_squeezebox=creds['nearest_squeezebox']) def setup_speech(assistant):", "you will not be able to use Device Actions or see your device", "None last_device_id = None try: with open(args.device_config) as f: device_config = json.load(f) device_model_id", "'google-oauthlib-tool', 'credentials.json' ), help='path to store and read OAuth2 credentials') parser.add_argument('-v', '--version', action='version',", "def __init__(self, filename): self.filename = filename def write(self, message): with open(self.filename, \"a\") as", "Version 2.0 (the \"License\"); # you may not use this file except in", "for the specific language governing permissions and # limitations under the License. from", "except in compliance with the License. # You may obtain a copy of", "\"device_id\": device_id }) # Re-register if \"device_id\" is different from the last \"device_id\":", "action', 'command': command, 'params': params}) try: if command == \"com.example.commands.SqueezeBoxCommand\": squeeze_controller.simple_command(params) elif command", "event.type in [EventType.ON_END_OF_UTTERANCE, EventType.ON_RESPONDING_FINISHED]: squeeze_controller.return_volume() log({'type': 'return volume'}) def setup_controllers(credentials_path): global squeeze_controller with", "= str(squeeze_controller.spotify_search_and_play(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxVolume\": squeeze_controller.set_volume(params)", "f.write(message) def flush(self): pass def log(x): now = datetime.datetime.now().strftime('%F_%X') x['time'] = now print(x)", "WARNING_NOT_REGISTERED = \"\"\" This device is not registered. This means you will not", "= str(squeeze_controller.simple_query(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxSearch\": ans", "import print_function import argparse import json import os.path import pathlib2 as pathlib import", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "may not use this file except in compliance with the License. 
# You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "option') # Re-register if \"device_model_id\" is given by the user and it differs", "after me \" + x) def main(): parser = argparse.ArgumentParser( formatter_class=argparse.RawTextHelpFormatter) parser.add_argument('--device-model-id', '--device_model_id',", "project ID used to register this device') parser.add_argument('--device-config', type=str, metavar='DEVICE_CONFIG_FILE', default=os.path.join( os.path.expanduser('~/.config'), 'googlesamples-assistant',", "json import os.path import pathlib2 as pathlib import google.oauth2.credentials from google.assistant.library import Assistant", "import existing_file from google.assistant.library.device_helpers import register_device import assistant_squeezebox_controller as squeezebox import sys import", "os.path.expanduser('~/.config'), 'google-oauthlib-tool', 'credentials.json' ), help='path to store and read OAuth2 credentials') parser.add_argument('-v', '--version',", "= Logger(args.logfile) with open(args.credentials, 'r') as f: credentials = google.oauth2.credentials.Credentials(token=None, **json.load(f)) device_model_id =", "str(squeeze_controller.search_and_play(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxPlayNext\": ans =", "to process. \"\"\" if event.type == EventType.ON_DEVICE_ACTION: for command, params in event.actions: log({'type':", "'params': params}) try: if command == \"com.example.commands.SqueezeBoxCommand\": squeeze_controller.simple_command(params) elif command == \"com.example.commands.SqueezeBoxQuery\": ans", "control credentials') args = parser.parse_args() if args.logfile: sys.stdout = sys.stderr = Logger(args.logfile) with", "\"com.example.commands.SqueezeBoxPlayNext\": ans = str(squeeze_controller.search_and_play_next(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command ==", "\"com.example.commands.SqueezeBoxSync\": squeeze_controller.sync_player(params) elif command == \"com.example.commands.SqueezeBoxRadio4\": squeeze_controller.play_radio4(params) except squeezebox.UserException as e: e =", "required=True, help='path of home control credentials') args = parser.parse_args() if args.logfile: sys.stdout =", "elif command == \"com.example.commands.SqueezeBoxPlayNext\": ans = str(squeeze_controller.search_and_play_next(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans})", "e = str(e) speak(e) log({'type': 'exception', 'message': e}) elif event.type == EventType.ON_RECOGNIZING_SPEECH_FINISHED: log({'type':", "last_device_id = device_config.get('last_device_id', None) except FileNotFoundError: pass if not args.device_model_id and not device_model_id:", "user and it differs # from what we previously registered with. 
should_register =", "try: if command == \"com.example.commands.SqueezeBoxCommand\": squeeze_controller.simple_command(params) elif command == \"com.example.commands.SqueezeBoxQuery\": ans = str(squeeze_controller.simple_query(params))", "device_model_id with Assistant(credentials, device_model_id) as assistant: events = assistant.start() device_id = assistant.device_id log({", "'google response', 'text': event.args['text']}) elif event.type == EventType.ON_CONVERSATION_TURN_STARTED: log({'type': 'listening'}) if event.type in", "Re-register if \"device_model_id\" is given by the user and it differs # from", "device_config['model_id'] last_device_id = device_config.get('last_device_id', None) except FileNotFoundError: pass if not args.device_model_id and not", "log({'type': 'speech', 'text': event.args['text']}) elif event.type == EventType.ON_RENDER_RESPONSE: log({'type': 'google response', 'text': event.args['text']})", "for command, params in event.actions: log({'type': 'device action', 'command': command, 'params': params}) try:", "help='the device model ID registered with Google') parser.add_argument('--project-id', '--project_id', type=str, metavar='PROJECT_ID', required=False, help='the", "action='version', version='%(prog)s ' + Assistant.__version_str__()) parser.add_argument('--logfile', type=str, required=False, help='file to write the log", "device_model_id = device_config['model_id'] last_device_id = device_config.get('last_device_id', None) except FileNotFoundError: pass if not args.device_model_id", "device_config = json.load(f) device_model_id = device_config['model_id'] last_device_id = device_config.get('last_device_id', None) except FileNotFoundError: pass", "volume'}) def setup_controllers(credentials_path): global squeeze_controller with open(credentials_path, \"r\") as f: creds = json.loads(f.read())", "from google.assistant.library import Assistant from google.assistant.library.event import EventType from google.assistant.library.file_helpers import existing_file from", "command == \"com.example.commands.SqueezeBoxSync\": squeeze_controller.sync_player(params) elif command == \"com.example.commands.SqueezeBoxRadio4\": squeeze_controller.play_radio4(params) except squeezebox.UserException as e:", "with. should_register = ( args.device_model_id and args.device_model_id != device_model_id) device_model_id = args.device_model_id or", "'device_config_library.json' ), help='path to store and read device configuration') parser.add_argument('--credentials', type=existing_file, metavar='OAUTH2_CREDENTIALS_FILE', default=os.path.join(", "squeeze_controller with open(credentials_path, \"r\") as f: creds = json.loads(f.read()) squeeze_controller = squeezebox.AssistantSqueezeBoxController(creds['squeezebox_server']['ip'], creds['squeezebox_server']['port'],", "event.type == EventType.ON_RENDER_RESPONSE: log({'type': 'google response', 'text': event.args['text']}) elif event.type == EventType.ON_CONVERSATION_TURN_STARTED: log({'type':", "speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxPlayNext\": ans = str(squeeze_controller.search_and_play_next(params))", "registered with. 
should_register = ( args.device_model_id and args.device_model_id != device_model_id) device_model_id = args.device_model_id", "str(squeeze_controller.search_and_play_next(params)) speak(ans) log({'type': 'squeezebox response', 'message': ans}) elif command == \"com.example.commands.SqueezeBoxPlayEnd\": ans =", "'--version', action='version', version='%(prog)s ' + Assistant.__version_str__()) parser.add_argument('--logfile', type=str, required=False, help='file to write the", "type=existing_file, metavar='OAUTH2_CREDENTIALS_FILE', default=os.path.join( os.path.expanduser('~/.config'), 'google-oauthlib-tool', 'credentials.json' ), help='path to store and read OAuth2", "if not args.device_model_id and not device_model_id: raise Exception('Missing --device-model-id option') # Re-register if", "elif command == \"com.example.commands.SqueezeBoxVolume\": squeeze_controller.set_volume(params) elif command == \"com.example.commands.SqueezeBoxSleep\": squeeze_controller.sleep_in(params) elif command ==", "= assistant.start() device_id = assistant.device_id log({ \"type\": \"starting up\", \"device_model_id\": device_model_id, \"device_id\": device_id", "is different from the last \"device_id\": if should_register or (device_id != last_device_id): if", "elif command == \"com.example.commands.SqueezeBoxSendMusic\": squeeze_controller.send_music(params) elif command == \"com.example.commands.SqueezeBoxSync\": squeeze_controller.sync_player(params) elif command ==", "\"\"\" class Logger(object): def __init__(self, filename): self.filename = filename def write(self, message): with", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "type=str, required=False, help='file to write the log to') parser.add_argument('--home_control_credentials', type=str, required=True, help='path of", "'--project_id', type=str, metavar='PROJECT_ID', required=False, help='the project ID used to register this device') parser.add_argument('--device-config',", "'exception', 'message': e}) elif event.type == EventType.ON_RECOGNIZING_SPEECH_FINISHED: log({'type': 'speech', 'text': event.args['text']}) elif event.type", "event.type == EventType.ON_DEVICE_ACTION: for command, params in event.actions: log({'type': 'device action', 'command': command,", "__init__(self, filename): self.filename = filename def write(self, message): with open(self.filename, \"a\") as f:", "event to process. \"\"\" if event.type == EventType.ON_DEVICE_ACTION: for command, params in event.actions:", "with Assistant(credentials, device_model_id) as assistant: events = assistant.start() device_id = assistant.device_id log({ \"type\":", "except Exception as e: e = str(e) speak(e) log({'type': 'exception', 'message': e}) elif", "use Device Actions or see your device in Assistant Settings. In order to", "'model_id': device_model_id, }, f) else: print(WARNING_NOT_REGISTERED) setup_controllers(args.home_control_credentials) setup_speech(assistant) for event in events: process_event(event)", "https://developers.google.com/assistant/sdk/guides/library/python/embed/register-device \"\"\" class Logger(object): def __init__(self, filename): self.filename = filename def write(self, message):", "import google.oauth2.credentials from google.assistant.library import Assistant from google.assistant.library.event import EventType from google.assistant.library.file_helpers import" ]
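For reference, here is a minimal sketch of the home-control credentials file that setup_controllers() reads from the --home_control_credentials path. Only the key names come from the code above; the server address, port, player name, and output filename are placeholder assumptions, not values taken from the source.

import json

# Hypothetical example of the JSON passed via --home_control_credentials.
# Key names match setup_controllers(); every value below is a placeholder.
home_control_credentials = {
    "squeezebox_server": {
        "ip": "192.168.1.50",   # placeholder Logitech Media Server address
        "port": 9090,           # placeholder server port
    },
    "nearest_squeezebox": "Living Room",  # placeholder default player name
}

with open("home_control.json", "w") as f:
    json.dump(home_control_credentials, f, indent=2)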
[ "else: cmd_test = \"cp \" + folders[i] + \" \" + tmp_pathings +", "os.makedirs(os.path.join(\"cross_valid\", \"1\")) os.makedirs(os.path.join(\"cross_valid/1\", \"train\")) os.makedirs(os.path.join(\"cross_valid/1\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"2\")) os.makedirs(os.path.join(\"cross_valid/2\", \"train\")) os.makedirs(os.path.join(\"cross_valid/2\", \"test\")) os.makedirs(os.path.join(\"cross_valid\",", "V = os.listdir(path) for vj in V: tmp_path = path + \"/\" +", "Cross Validation os.system(\"mkdir cross_valid\") # Copy path into train and test dataset os.makedirs(os.path.join(\"cross_valid\",", "\"train\")) os.makedirs(os.path.join(\"cross_valid/9\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"10\")) os.makedirs(os.path.join(\"cross_valid/10\", \"train\")) os.makedirs(os.path.join(\"cross_valid/10\", \"test\")) for f in range(1,11):", "test for:\", 'blue', attrs=['bold']) print vj print colored(\"Made train and test for cross-valid", "# delete them if(len(incompatible_files) > 0): print colored('Converting incompatible files', 'red', attrs=['bold']) for", "glob import sklearn.datasets from colorama import init from termcolor import colored from random", "Copy path into train and test dataset os.makedirs(os.path.join(\"cross_valid\", \"1\")) os.makedirs(os.path.join(\"cross_valid/1\", \"train\")) os.makedirs(os.path.join(\"cross_valid/1\", \"test\"))", "+ tmp_pathings os.system(cmd_1) os.system(cmd_2) folders = glob.glob(os.path.join(tmp_path, '*')) train_split = int(round(len(folders) * 0.6))", "\"iconv -f ISO-8859-1 \" + f + \" -t UTF-8 -o tmp\" cmdd", "print colored(\"Made train and test for:\", 'blue', attrs=['bold']) print vj print colored(\"Made train", "path = \"20news-18828\" # Create new dataset for processing cmd = \"cp -r", "os.makedirs(os.path.join(\"cross_valid/7\", \"train\")) os.makedirs(os.path.join(\"cross_valid/7\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"8\")) os.makedirs(os.path.join(\"cross_valid/8\", \"train\")) os.makedirs(os.path.join(\"cross_valid/8\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"9\")) os.makedirs(os.path.join(\"cross_valid/9\",", "1 print colored(\"Made train and test for:\", 'blue', attrs=['bold']) print vj print colored(\"Made", "\"9\")) os.makedirs(os.path.join(\"cross_valid/9\", \"train\")) os.makedirs(os.path.join(\"cross_valid/9\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"10\")) os.makedirs(os.path.join(\"cross_valid/10\", \"train\")) os.makedirs(os.path.join(\"cross_valid/10\", \"test\")) for f", "+ folders[i] + \" \" + tmp_pathings + \"/\" + str(i) os.system(cmd_test) i", "= sklearn.datasets.load_files(path) incompatible_files = [] for i in range(len(files.filenames)): try: count_vector.fit_transform(files.data[i:i + 1])", "dataset:\", 'blue', attrs=['bold']) print f def change_incompatible_files(path): # find incompatible files print colored('Finding", "colored('Converting incompatible files', 'red', attrs=['bold']) for f in incompatible_files: print colored(\"Changing file to", "from random import sample import random import numpy as np def main(): init()", "print colored(\"Changing file to UTF-8:\", 'red'), f cmd = \"iconv -f ISO-8859-1 \"", "dataset_process\") def monte_carlo_cross(path): # Monte Carlo Cross Validation os.system(\"mkdir cross_valid\") # Copy path", "\"4\")) os.makedirs(os.path.join(\"cross_valid/4\", \"train\")) os.makedirs(os.path.join(\"cross_valid/4\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"5\")) 
os.makedirs(os.path.join(\"cross_valid/5\", \"train\")) os.makedirs(os.path.join(\"cross_valid/5\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"6\"))", "+ \" \" + tmp_pathing + \"/\" + str(i) os.system(cmd_train) else: cmd_test =", "%(f) V = os.listdir(path) for vj in V: tmp_path = path + \"/\"", "tmp_pathings = pathing + \"/test/\" + vj cmd_1 = \"mkdir \" + tmp_pathing", "the dataset path = \"20news-18828\" # Create new dataset for processing cmd =", "import glob import sklearn.datasets from colorama import init from termcolor import colored from", "= \"mkdir \" + tmp_pathings os.system(cmd_1) os.system(cmd_2) folders = glob.glob(os.path.join(tmp_path, '*')) train_split =", "# Monte Carlo Cross Validation os.system(\"mkdir cross_valid\") # Copy path into train and", "folders = glob.glob(os.path.join(tmp_path, '*')) train_split = int(round(len(folders) * 0.6)) indices = sample(range(0, len(folders)-1),", "monte_carlo_cross(path): # Monte Carlo Cross Validation os.system(\"mkdir cross_valid\") # Copy path into train", "sample(range(0, len(folders)-1), train_split) i = 0 folder_indices = len(folders) -1 while i <=", "new_path = \"dataset_process\" change_incompatible_files(new_path) monte_carlo_cross(new_path) os.system(\"rm -r dataset_process\") def monte_carlo_cross(path): # Monte Carlo", "os.makedirs(os.path.join(\"cross_valid/5\", \"train\")) os.makedirs(os.path.join(\"cross_valid/5\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"6\")) os.makedirs(os.path.join(\"cross_valid/6\", \"train\")) os.makedirs(os.path.join(\"cross_valid/6\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"7\")) os.makedirs(os.path.join(\"cross_valid/7\",", "+ tmp_pathing + \"/\" + str(i) os.system(cmd_train) else: cmd_test = \"cp \" +", "+ vj tmp_pathings = pathing + \"/test/\" + vj cmd_1 = \"mkdir \"", "random import sample import random import numpy as np def main(): init() #", "= pathing + \"/test/\" + vj cmd_1 = \"mkdir \" + tmp_pathing cmd_2", "with utf8: ', 'green', attrs=['bold']) count_vector = sklearn.feature_extraction.text.CountVectorizer() files = sklearn.datasets.load_files(path) incompatible_files =", "# get the dataset path = \"20news-18828\" # Create new dataset for processing", "0.6)) indices = sample(range(0, len(folders)-1), train_split) i = 0 folder_indices = len(folders) -1", "+ \" -t UTF-8 -o tmp\" cmdd = \"cp tmp \" + f", "= \"iconv -f ISO-8859-1 \" + f + \" -t UTF-8 -o tmp\"", "= \"cp -r \" + path + \" dataset_process\" print colored(\"Copying dataset into", "\"5\")) os.makedirs(os.path.join(\"cross_valid/5\", \"train\")) os.makedirs(os.path.join(\"cross_valid/5\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"6\")) os.makedirs(os.path.join(\"cross_valid/6\", \"train\")) os.makedirs(os.path.join(\"cross_valid/6\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"7\"))", "\" -t UTF-8 -o tmp\" cmdd = \"cp tmp \" + f os.system(cmd)", "+ str(i) os.system(cmd_train) else: cmd_test = \"cp \" + folders[i] + \" \"", "path into train and test dataset os.makedirs(os.path.join(\"cross_valid\", \"1\")) os.makedirs(os.path.join(\"cross_valid/1\", \"train\")) os.makedirs(os.path.join(\"cross_valid/1\", \"test\")) os.makedirs(os.path.join(\"cross_valid\",", "+ vj tmp_pathing = pathing + \"/train/\" + vj tmp_pathings = pathing +", "f def change_incompatible_files(path): # find incompatible files print colored('Finding files incompatible with utf8:", "vj tmp_pathings = pathing + \"/test/\" + vj cmd_1 = \"mkdir \" +", "import colored from random import sample 
import random import numpy as np def", "\" + f + \" -t UTF-8 -o tmp\" cmdd = \"cp tmp", "\"train\")) os.makedirs(os.path.join(\"cross_valid/4\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"5\")) os.makedirs(os.path.join(\"cross_valid/5\", \"train\")) os.makedirs(os.path.join(\"cross_valid/5\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"6\")) os.makedirs(os.path.join(\"cross_valid/6\", \"train\"))", "os.makedirs(os.path.join(\"cross_valid/9\", \"train\")) os.makedirs(os.path.join(\"cross_valid/9\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"10\")) os.makedirs(os.path.join(\"cross_valid/10\", \"train\")) os.makedirs(os.path.join(\"cross_valid/10\", \"test\")) for f in", "f in incompatible_files: print colored(\"Changing file to UTF-8:\", 'red'), f cmd = \"iconv", "as np def main(): init() # get the dataset path = \"20news-18828\" #", "file to UTF-8:\", 'red'), f cmd = \"iconv -f ISO-8859-1 \" + f", "\" + folders[i] + \" \" + tmp_pathings + \"/\" + str(i) os.system(cmd_test)", "into dataset_process\", 'blue', attrs=['bold']) os.system(cmd) new_path = \"dataset_process\" change_incompatible_files(new_path) monte_carlo_cross(new_path) os.system(\"rm -r dataset_process\")", "-r dataset_process\") def monte_carlo_cross(path): # Monte Carlo Cross Validation os.system(\"mkdir cross_valid\") # Copy", "# Create new dataset for processing cmd = \"cp -r \" + path", "numpy as np def main(): init() # get the dataset path = \"20news-18828\"", "+ 1]) except UnicodeDecodeError: incompatible_files.append(files.filenames[i]) except ValueError: pass print colored(len(incompatible_files), 'yellow'), 'files found'", "\"test\")) os.makedirs(os.path.join(\"cross_valid\", \"5\")) os.makedirs(os.path.join(\"cross_valid/5\", \"train\")) os.makedirs(os.path.join(\"cross_valid/5\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"6\")) os.makedirs(os.path.join(\"cross_valid/6\", \"train\")) os.makedirs(os.path.join(\"cross_valid/6\", \"test\"))", "sample import random import numpy as np def main(): init() # get the", "Create new dataset for processing cmd = \"cp -r \" + path +", "= \"cp \" + folders[i] + \" \" + tmp_pathings + \"/\" +", "print f def change_incompatible_files(path): # find incompatible files print colored('Finding files incompatible with", "colored(len(incompatible_files), 'yellow'), 'files found' # delete them if(len(incompatible_files) > 0): print colored('Converting incompatible", "glob.glob(os.path.join(tmp_path, '*')) train_split = int(round(len(folders) * 0.6)) indices = sample(range(0, len(folders)-1), train_split) i", "import sklearn.datasets from colorama import init from termcolor import colored from random import", "i in indices: cmd_train = \"cp \" + folders[i] + \" \" +", "for f in incompatible_files: print colored(\"Changing file to UTF-8:\", 'red'), f cmd =", "tmp_pathing = pathing + \"/train/\" + vj tmp_pathings = pathing + \"/test/\" +", "files print colored('Finding files incompatible with utf8: ', 'green', attrs=['bold']) count_vector = sklearn.feature_extraction.text.CountVectorizer()", "<gh_stars>0 # <NAME> import sys import os import glob import sklearn.datasets from colorama", "os.makedirs(os.path.join(\"cross_valid\", \"10\")) os.makedirs(os.path.join(\"cross_valid/10\", \"train\")) os.makedirs(os.path.join(\"cross_valid/10\", \"test\")) for f in range(1,11): pathing = \"cross_valid/%d\"", "\"3\")) os.makedirs(os.path.join(\"cross_valid/3\", \"train\")) os.makedirs(os.path.join(\"cross_valid/3\", \"test\")) 
os.makedirs(os.path.join(\"cross_valid\", \"4\")) os.makedirs(os.path.join(\"cross_valid/4\", \"train\")) os.makedirs(os.path.join(\"cross_valid/4\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"5\"))", "\" + tmp_pathings + \"/\" + str(i) os.system(cmd_test) i += 1 print colored(\"Made", "colored(\"Made train and test for cross-valid dataset:\", 'blue', attrs=['bold']) print f def change_incompatible_files(path):", "> 0): print colored('Converting incompatible files', 'red', attrs=['bold']) for f in incompatible_files: print", "\"/\" + str(i) os.system(cmd_train) else: cmd_test = \"cp \" + folders[i] + \"", "os.makedirs(os.path.join(\"cross_valid\", \"7\")) os.makedirs(os.path.join(\"cross_valid/7\", \"train\")) os.makedirs(os.path.join(\"cross_valid/7\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"8\")) os.makedirs(os.path.join(\"cross_valid/8\", \"train\")) os.makedirs(os.path.join(\"cross_valid/8\", \"test\")) os.makedirs(os.path.join(\"cross_valid\",", "str(i) os.system(cmd_train) else: cmd_test = \"cp \" + folders[i] + \" \" +", "processing cmd = \"cp -r \" + path + \" dataset_process\" print colored(\"Copying", "os.makedirs(os.path.join(\"cross_valid\", \"9\")) os.makedirs(os.path.join(\"cross_valid/9\", \"train\")) os.makedirs(os.path.join(\"cross_valid/9\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"10\")) os.makedirs(os.path.join(\"cross_valid/10\", \"train\")) os.makedirs(os.path.join(\"cross_valid/10\", \"test\")) for", "= int(round(len(folders) * 0.6)) indices = sample(range(0, len(folders)-1), train_split) i = 0 folder_indices", "+ \"/train/\" + vj tmp_pathings = pathing + \"/test/\" + vj cmd_1 =", "os.makedirs(os.path.join(\"cross_valid/6\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"7\")) os.makedirs(os.path.join(\"cross_valid/7\", \"train\")) os.makedirs(os.path.join(\"cross_valid/7\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"8\")) os.makedirs(os.path.join(\"cross_valid/8\", \"train\")) os.makedirs(os.path.join(\"cross_valid/8\",", "\"train\")) os.makedirs(os.path.join(\"cross_valid/7\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"8\")) os.makedirs(os.path.join(\"cross_valid/8\", \"train\")) os.makedirs(os.path.join(\"cross_valid/8\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"9\")) os.makedirs(os.path.join(\"cross_valid/9\", \"train\"))", "os.makedirs(os.path.join(\"cross_valid/2\", \"train\")) os.makedirs(os.path.join(\"cross_valid/2\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"3\")) os.makedirs(os.path.join(\"cross_valid/3\", \"train\")) os.makedirs(os.path.join(\"cross_valid/3\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"4\")) os.makedirs(os.path.join(\"cross_valid/4\",", "range(1,11): pathing = \"cross_valid/%d\" %(f) V = os.listdir(path) for vj in V: tmp_path", "if i in indices: cmd_train = \"cp \" + folders[i] + \" \"", "import random import numpy as np def main(): init() # get the dataset", "i <= folder_indices: if i in indices: cmd_train = \"cp \" + folders[i]", "cmd_1 = \"mkdir \" + tmp_pathing cmd_2 = \"mkdir \" + tmp_pathings os.system(cmd_1)", "vj cmd_1 = \"mkdir \" + tmp_pathing cmd_2 = \"mkdir \" + tmp_pathings", "pass print colored(len(incompatible_files), 'yellow'), 'files found' # delete them if(len(incompatible_files) > 0): print", "except ValueError: pass print colored(len(incompatible_files), 'yellow'), 'files found' # delete them if(len(incompatible_files) >", "\" + tmp_pathing cmd_2 = \"mkdir \" + tmp_pathings os.system(cmd_1) os.system(cmd_2) 
folders =", "os.system(cmd_train) else: cmd_test = \"cp \" + folders[i] + \" \" + tmp_pathings", "UTF-8:\", 'red'), f cmd = \"iconv -f ISO-8859-1 \" + f + \"", "-f ISO-8859-1 \" + f + \" -t UTF-8 -o tmp\" cmdd =", "\"train\")) os.makedirs(os.path.join(\"cross_valid/6\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"7\")) os.makedirs(os.path.join(\"cross_valid/7\", \"train\")) os.makedirs(os.path.join(\"cross_valid/7\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"8\")) os.makedirs(os.path.join(\"cross_valid/8\", \"train\"))", "files = sklearn.datasets.load_files(path) incompatible_files = [] for i in range(len(files.filenames)): try: count_vector.fit_transform(files.data[i:i +", "i += 1 print colored(\"Made train and test for:\", 'blue', attrs=['bold']) print vj", "cross-valid dataset:\", 'blue', attrs=['bold']) print f def change_incompatible_files(path): # find incompatible files print", "\"/\" + str(i) os.system(cmd_test) i += 1 print colored(\"Made train and test for:\",", "os.makedirs(os.path.join(\"cross_valid/4\", \"train\")) os.makedirs(os.path.join(\"cross_valid/4\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"5\")) os.makedirs(os.path.join(\"cross_valid/5\", \"train\")) os.makedirs(os.path.join(\"cross_valid/5\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"6\")) os.makedirs(os.path.join(\"cross_valid/6\",", "print colored('Converting incompatible files', 'red', attrs=['bold']) for f in incompatible_files: print colored(\"Changing file", "colored(\"Copying dataset into dataset_process\", 'blue', attrs=['bold']) os.system(cmd) new_path = \"dataset_process\" change_incompatible_files(new_path) monte_carlo_cross(new_path) os.system(\"rm", "cmd = \"cp -r \" + path + \" dataset_process\" print colored(\"Copying dataset", "\"mkdir \" + tmp_pathings os.system(cmd_1) os.system(cmd_2) folders = glob.glob(os.path.join(tmp_path, '*')) train_split = int(round(len(folders)", "= pathing + \"/train/\" + vj tmp_pathings = pathing + \"/test/\" + vj", "V: tmp_path = path + \"/\" + vj tmp_pathing = pathing + \"/train/\"", "tmp_pathing + \"/\" + str(i) os.system(cmd_train) else: cmd_test = \"cp \" + folders[i]", "\"dataset_process\" change_incompatible_files(new_path) monte_carlo_cross(new_path) os.system(\"rm -r dataset_process\") def monte_carlo_cross(path): # Monte Carlo Cross Validation", "\"cross_valid/%d\" %(f) V = os.listdir(path) for vj in V: tmp_path = path +", "-1 while i <= folder_indices: if i in indices: cmd_train = \"cp \"", "colored(\"Made train and test for:\", 'blue', attrs=['bold']) print vj print colored(\"Made train and", "\"test\")) os.makedirs(os.path.join(\"cross_valid\", \"9\")) os.makedirs(os.path.join(\"cross_valid/9\", \"train\")) os.makedirs(os.path.join(\"cross_valid/9\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"10\")) os.makedirs(os.path.join(\"cross_valid/10\", \"train\")) os.makedirs(os.path.join(\"cross_valid/10\", \"test\"))", "\"/train/\" + vj tmp_pathings = pathing + \"/test/\" + vj cmd_1 = \"mkdir", "os.makedirs(os.path.join(\"cross_valid/6\", \"train\")) os.makedirs(os.path.join(\"cross_valid/6\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"7\")) os.makedirs(os.path.join(\"cross_valid/7\", \"train\")) os.makedirs(os.path.join(\"cross_valid/7\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"8\")) os.makedirs(os.path.join(\"cross_valid/8\",", "files incompatible with utf8: ', 'green', attrs=['bold']) count_vector = sklearn.feature_extraction.text.CountVectorizer() files = 
sklearn.datasets.load_files(path)", "-o tmp\" cmdd = \"cp tmp \" + f os.system(cmd) os.system(cmdd) os.remove(\"tmp\") main()", "pathing + \"/test/\" + vj cmd_1 = \"mkdir \" + tmp_pathing cmd_2 =", "<= folder_indices: if i in indices: cmd_train = \"cp \" + folders[i] +", "colored from random import sample import random import numpy as np def main():", "os.makedirs(os.path.join(\"cross_valid/10\", \"test\")) for f in range(1,11): pathing = \"cross_valid/%d\" %(f) V = os.listdir(path)", "\"train\")) os.makedirs(os.path.join(\"cross_valid/1\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"2\")) os.makedirs(os.path.join(\"cross_valid/2\", \"train\")) os.makedirs(os.path.join(\"cross_valid/2\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"3\")) os.makedirs(os.path.join(\"cross_valid/3\", \"train\"))", "\" + tmp_pathings os.system(cmd_1) os.system(cmd_2) folders = glob.glob(os.path.join(tmp_path, '*')) train_split = int(round(len(folders) *", "= \"cross_valid/%d\" %(f) V = os.listdir(path) for vj in V: tmp_path = path", "+ vj cmd_1 = \"mkdir \" + tmp_pathing cmd_2 = \"mkdir \" +", "\" \" + tmp_pathing + \"/\" + str(i) os.system(cmd_train) else: cmd_test = \"cp", "dataset into dataset_process\", 'blue', attrs=['bold']) os.system(cmd) new_path = \"dataset_process\" change_incompatible_files(new_path) monte_carlo_cross(new_path) os.system(\"rm -r", "\"test\")) os.makedirs(os.path.join(\"cross_valid\", \"3\")) os.makedirs(os.path.join(\"cross_valid/3\", \"train\")) os.makedirs(os.path.join(\"cross_valid/3\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"4\")) os.makedirs(os.path.join(\"cross_valid/4\", \"train\")) os.makedirs(os.path.join(\"cross_valid/4\", \"test\"))", "+ tmp_pathing cmd_2 = \"mkdir \" + tmp_pathings os.system(cmd_1) os.system(cmd_2) folders = glob.glob(os.path.join(tmp_path,", "f in range(1,11): pathing = \"cross_valid/%d\" %(f) V = os.listdir(path) for vj in", "\"cp \" + folders[i] + \" \" + tmp_pathing + \"/\" + str(i)", "0 folder_indices = len(folders) -1 while i <= folder_indices: if i in indices:", "'*')) train_split = int(round(len(folders) * 0.6)) indices = sample(range(0, len(folders)-1), train_split) i =", "int(round(len(folders) * 0.6)) indices = sample(range(0, len(folders)-1), train_split) i = 0 folder_indices =", "f + \" -t UTF-8 -o tmp\" cmdd = \"cp tmp \" +", "'red'), f cmd = \"iconv -f ISO-8859-1 \" + f + \" -t", "os.system(cmd_test) i += 1 print colored(\"Made train and test for:\", 'blue', attrs=['bold']) print", "= \"cp \" + folders[i] + \" \" + tmp_pathing + \"/\" +", "pathing = \"cross_valid/%d\" %(f) V = os.listdir(path) for vj in V: tmp_path =", "os.makedirs(os.path.join(\"cross_valid\", \"8\")) os.makedirs(os.path.join(\"cross_valid/8\", \"train\")) os.makedirs(os.path.join(\"cross_valid/8\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"9\")) os.makedirs(os.path.join(\"cross_valid/9\", \"train\")) os.makedirs(os.path.join(\"cross_valid/9\", \"test\")) os.makedirs(os.path.join(\"cross_valid\",", "test for cross-valid dataset:\", 'blue', attrs=['bold']) print f def change_incompatible_files(path): # find incompatible", "attrs=['bold']) os.system(cmd) new_path = \"dataset_process\" change_incompatible_files(new_path) monte_carlo_cross(new_path) os.system(\"rm -r dataset_process\") def monte_carlo_cross(path): #", "colored('Finding files incompatible with utf8: ', 'green', attrs=['bold']) count_vector = sklearn.feature_extraction.text.CountVectorizer() files =", "from termcolor import colored from random import sample 
import random import numpy as", "+= 1 print colored(\"Made train and test for:\", 'blue', attrs=['bold']) print vj print", "0): print colored('Converting incompatible files', 'red', attrs=['bold']) for f in incompatible_files: print colored(\"Changing", "1]) except UnicodeDecodeError: incompatible_files.append(files.filenames[i]) except ValueError: pass print colored(len(incompatible_files), 'yellow'), 'files found' #", "tmp_path = path + \"/\" + vj tmp_pathing = pathing + \"/train/\" +", "\"train\")) os.makedirs(os.path.join(\"cross_valid/10\", \"test\")) for f in range(1,11): pathing = \"cross_valid/%d\" %(f) V =", "= glob.glob(os.path.join(tmp_path, '*')) train_split = int(round(len(folders) * 0.6)) indices = sample(range(0, len(folders)-1), train_split)", "= \"mkdir \" + tmp_pathing cmd_2 = \"mkdir \" + tmp_pathings os.system(cmd_1) os.system(cmd_2)", "incompatible with utf8: ', 'green', attrs=['bold']) count_vector = sklearn.feature_extraction.text.CountVectorizer() files = sklearn.datasets.load_files(path) incompatible_files", "os.makedirs(os.path.join(\"cross_valid/8\", \"train\")) os.makedirs(os.path.join(\"cross_valid/8\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"9\")) os.makedirs(os.path.join(\"cross_valid/9\", \"train\")) os.makedirs(os.path.join(\"cross_valid/9\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"10\")) os.makedirs(os.path.join(\"cross_valid/10\",", "in indices: cmd_train = \"cp \" + folders[i] + \" \" + tmp_pathing", "os.makedirs(os.path.join(\"cross_valid/7\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"8\")) os.makedirs(os.path.join(\"cross_valid/8\", \"train\")) os.makedirs(os.path.join(\"cross_valid/8\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"9\")) os.makedirs(os.path.join(\"cross_valid/9\", \"train\")) os.makedirs(os.path.join(\"cross_valid/9\",", "import os import glob import sklearn.datasets from colorama import init from termcolor import", "-t UTF-8 -o tmp\" cmdd = \"cp tmp \" + f os.system(cmd) os.system(cmdd)", "os import glob import sklearn.datasets from colorama import init from termcolor import colored", "sys import os import glob import sklearn.datasets from colorama import init from termcolor", "os.makedirs(os.path.join(\"cross_valid/2\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"3\")) os.makedirs(os.path.join(\"cross_valid/3\", \"train\")) os.makedirs(os.path.join(\"cross_valid/3\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"4\")) os.makedirs(os.path.join(\"cross_valid/4\", \"train\")) os.makedirs(os.path.join(\"cross_valid/4\",", "\" \" + tmp_pathings + \"/\" + str(i) os.system(cmd_test) i += 1 print", "'green', attrs=['bold']) count_vector = sklearn.feature_extraction.text.CountVectorizer() files = sklearn.datasets.load_files(path) incompatible_files = [] for i", "def main(): init() # get the dataset path = \"20news-18828\" # Create new", "cross_valid\") # Copy path into train and test dataset os.makedirs(os.path.join(\"cross_valid\", \"1\")) os.makedirs(os.path.join(\"cross_valid/1\", \"train\"))", "\"train\")) os.makedirs(os.path.join(\"cross_valid/8\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"9\")) os.makedirs(os.path.join(\"cross_valid/9\", \"train\")) os.makedirs(os.path.join(\"cross_valid/9\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"10\")) os.makedirs(os.path.join(\"cross_valid/10\", \"train\"))", "'red', attrs=['bold']) for f in incompatible_files: print colored(\"Changing file to UTF-8:\", 'red'), f", "os.system(\"rm -r 
dataset_process\") def monte_carlo_cross(path): # Monte Carlo Cross Validation os.system(\"mkdir cross_valid\") #", "# <NAME> import sys import os import glob import sklearn.datasets from colorama import", "folders[i] + \" \" + tmp_pathing + \"/\" + str(i) os.system(cmd_train) else: cmd_test", "Monte Carlo Cross Validation os.system(\"mkdir cross_valid\") # Copy path into train and test", "dataset for processing cmd = \"cp -r \" + path + \" dataset_process\"", "import init from termcolor import colored from random import sample import random import", "train and test for cross-valid dataset:\", 'blue', attrs=['bold']) print f def change_incompatible_files(path): #", "+ tmp_pathings + \"/\" + str(i) os.system(cmd_test) i += 1 print colored(\"Made train", "', 'green', attrs=['bold']) count_vector = sklearn.feature_extraction.text.CountVectorizer() files = sklearn.datasets.load_files(path) incompatible_files = [] for", "'yellow'), 'files found' # delete them if(len(incompatible_files) > 0): print colored('Converting incompatible files',", "colored(\"Changing file to UTF-8:\", 'red'), f cmd = \"iconv -f ISO-8859-1 \" +", "indices: cmd_train = \"cp \" + folders[i] + \" \" + tmp_pathing +", "* 0.6)) indices = sample(range(0, len(folders)-1), train_split) i = 0 folder_indices = len(folders)", "\" + folders[i] + \" \" + tmp_pathing + \"/\" + str(i) os.system(cmd_train)", "import numpy as np def main(): init() # get the dataset path =", "'files found' # delete them if(len(incompatible_files) > 0): print colored('Converting incompatible files', 'red',", "= \"dataset_process\" change_incompatible_files(new_path) monte_carlo_cross(new_path) os.system(\"rm -r dataset_process\") def monte_carlo_cross(path): # Monte Carlo Cross", "for processing cmd = \"cp -r \" + path + \" dataset_process\" print", "for vj in V: tmp_path = path + \"/\" + vj tmp_pathing =", "+ \"/\" + str(i) os.system(cmd_test) i += 1 print colored(\"Made train and test", "print colored(\"Made train and test for cross-valid dataset:\", 'blue', attrs=['bold']) print f def", "# find incompatible files print colored('Finding files incompatible with utf8: ', 'green', attrs=['bold'])", "\"cp \" + folders[i] + \" \" + tmp_pathings + \"/\" + str(i)", "and test for:\", 'blue', attrs=['bold']) print vj print colored(\"Made train and test for", "os.makedirs(os.path.join(\"cross_valid/3\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"4\")) os.makedirs(os.path.join(\"cross_valid/4\", \"train\")) os.makedirs(os.path.join(\"cross_valid/4\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"5\")) os.makedirs(os.path.join(\"cross_valid/5\", \"train\")) os.makedirs(os.path.join(\"cross_valid/5\",", "\"test\")) for f in range(1,11): pathing = \"cross_valid/%d\" %(f) V = os.listdir(path) for", "into train and test dataset os.makedirs(os.path.join(\"cross_valid\", \"1\")) os.makedirs(os.path.join(\"cross_valid/1\", \"train\")) os.makedirs(os.path.join(\"cross_valid/1\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"2\"))", "\"test\")) os.makedirs(os.path.join(\"cross_valid\", \"6\")) os.makedirs(os.path.join(\"cross_valid/6\", \"train\")) os.makedirs(os.path.join(\"cross_valid/6\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"7\")) os.makedirs(os.path.join(\"cross_valid/7\", \"train\")) os.makedirs(os.path.join(\"cross_valid/7\", \"test\"))", "os.makedirs(os.path.join(\"cross_valid\", \"4\")) os.makedirs(os.path.join(\"cross_valid/4\", \"train\")) os.makedirs(os.path.join(\"cross_valid/4\", \"test\")) 
os.makedirs(os.path.join(\"cross_valid\", \"5\")) os.makedirs(os.path.join(\"cross_valid/5\", \"train\")) os.makedirs(os.path.join(\"cross_valid/5\", \"test\")) os.makedirs(os.path.join(\"cross_valid\",", "attrs=['bold']) count_vector = sklearn.feature_extraction.text.CountVectorizer() files = sklearn.datasets.load_files(path) incompatible_files = [] for i in", "Validation os.system(\"mkdir cross_valid\") # Copy path into train and test dataset os.makedirs(os.path.join(\"cross_valid\", \"1\"))", "+ \"/test/\" + vj cmd_1 = \"mkdir \" + tmp_pathing cmd_2 = \"mkdir", "\"test\")) os.makedirs(os.path.join(\"cross_valid\", \"10\")) os.makedirs(os.path.join(\"cross_valid/10\", \"train\")) os.makedirs(os.path.join(\"cross_valid/10\", \"test\")) for f in range(1,11): pathing =", "print colored(\"Copying dataset into dataset_process\", 'blue', attrs=['bold']) os.system(cmd) new_path = \"dataset_process\" change_incompatible_files(new_path) monte_carlo_cross(new_path)", "\"7\")) os.makedirs(os.path.join(\"cross_valid/7\", \"train\")) os.makedirs(os.path.join(\"cross_valid/7\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"8\")) os.makedirs(os.path.join(\"cross_valid/8\", \"train\")) os.makedirs(os.path.join(\"cross_valid/8\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"9\"))", "folders[i] + \" \" + tmp_pathings + \"/\" + str(i) os.system(cmd_test) i +=", "train and test for:\", 'blue', attrs=['bold']) print vj print colored(\"Made train and test", "os.system(\"mkdir cross_valid\") # Copy path into train and test dataset os.makedirs(os.path.join(\"cross_valid\", \"1\")) os.makedirs(os.path.join(\"cross_valid/1\",", "\"/\" + vj tmp_pathing = pathing + \"/train/\" + vj tmp_pathings = pathing", "incompatible_files = [] for i in range(len(files.filenames)): try: count_vector.fit_transform(files.data[i:i + 1]) except UnicodeDecodeError:", "len(folders) -1 while i <= folder_indices: if i in indices: cmd_train = \"cp", "change_incompatible_files(new_path) monte_carlo_cross(new_path) os.system(\"rm -r dataset_process\") def monte_carlo_cross(path): # Monte Carlo Cross Validation os.system(\"mkdir", "UTF-8 -o tmp\" cmdd = \"cp tmp \" + f os.system(cmd) os.system(cmdd) os.remove(\"tmp\")", "os.makedirs(os.path.join(\"cross_valid/3\", \"train\")) os.makedirs(os.path.join(\"cross_valid/3\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"4\")) os.makedirs(os.path.join(\"cross_valid/4\", \"train\")) os.makedirs(os.path.join(\"cross_valid/4\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"5\")) os.makedirs(os.path.join(\"cross_valid/5\",", "+ \"/\" + vj tmp_pathing = pathing + \"/train/\" + vj tmp_pathings =", "cmd_test = \"cp \" + folders[i] + \" \" + tmp_pathings + \"/\"", "monte_carlo_cross(new_path) os.system(\"rm -r dataset_process\") def monte_carlo_cross(path): # Monte Carlo Cross Validation os.system(\"mkdir cross_valid\")", "= len(folders) -1 while i <= folder_indices: if i in indices: cmd_train =", "random import numpy as np def main(): init() # get the dataset path", "new dataset for processing cmd = \"cp -r \" + path + \"", "= [] for i in range(len(files.filenames)): try: count_vector.fit_transform(files.data[i:i + 1]) except UnicodeDecodeError: incompatible_files.append(files.filenames[i])", "os.system(cmd_1) os.system(cmd_2) folders = glob.glob(os.path.join(tmp_path, '*')) train_split = int(round(len(folders) * 0.6)) indices =", "and test for cross-valid dataset:\", 'blue', attrs=['bold']) print f def change_incompatible_files(path): # find", "attrs=['bold']) for f 
in incompatible_files: print colored(\"Changing file to UTF-8:\", 'red'), f cmd", "def monte_carlo_cross(path): # Monte Carlo Cross Validation os.system(\"mkdir cross_valid\") # Copy path into", "train and test dataset os.makedirs(os.path.join(\"cross_valid\", \"1\")) os.makedirs(os.path.join(\"cross_valid/1\", \"train\")) os.makedirs(os.path.join(\"cross_valid/1\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"2\")) os.makedirs(os.path.join(\"cross_valid/2\",", "os.makedirs(os.path.join(\"cross_valid/4\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"5\")) os.makedirs(os.path.join(\"cross_valid/5\", \"train\")) os.makedirs(os.path.join(\"cross_valid/5\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"6\")) os.makedirs(os.path.join(\"cross_valid/6\", \"train\")) os.makedirs(os.path.join(\"cross_valid/6\",", "indices = sample(range(0, len(folders)-1), train_split) i = 0 folder_indices = len(folders) -1 while", "# Copy path into train and test dataset os.makedirs(os.path.join(\"cross_valid\", \"1\")) os.makedirs(os.path.join(\"cross_valid/1\", \"train\")) os.makedirs(os.path.join(\"cross_valid/1\",", "os.makedirs(os.path.join(\"cross_valid\", \"5\")) os.makedirs(os.path.join(\"cross_valid/5\", \"train\")) os.makedirs(os.path.join(\"cross_valid/5\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"6\")) os.makedirs(os.path.join(\"cross_valid/6\", \"train\")) os.makedirs(os.path.join(\"cross_valid/6\", \"test\")) os.makedirs(os.path.join(\"cross_valid\",", "'blue', attrs=['bold']) os.system(cmd) new_path = \"dataset_process\" change_incompatible_files(new_path) monte_carlo_cross(new_path) os.system(\"rm -r dataset_process\") def monte_carlo_cross(path):", "\"test\")) os.makedirs(os.path.join(\"cross_valid\", \"8\")) os.makedirs(os.path.join(\"cross_valid/8\", \"train\")) os.makedirs(os.path.join(\"cross_valid/8\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"9\")) os.makedirs(os.path.join(\"cross_valid/9\", \"train\")) os.makedirs(os.path.join(\"cross_valid/9\", \"test\"))", "\" dataset_process\" print colored(\"Copying dataset into dataset_process\", 'blue', attrs=['bold']) os.system(cmd) new_path = \"dataset_process\"", "os.makedirs(os.path.join(\"cross_valid/8\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"9\")) os.makedirs(os.path.join(\"cross_valid/9\", \"train\")) os.makedirs(os.path.join(\"cross_valid/9\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"10\")) os.makedirs(os.path.join(\"cross_valid/10\", \"train\")) os.makedirs(os.path.join(\"cross_valid/10\",", "incompatible_files.append(files.filenames[i]) except ValueError: pass print colored(len(incompatible_files), 'yellow'), 'files found' # delete them if(len(incompatible_files)", "found' # delete them if(len(incompatible_files) > 0): print colored('Converting incompatible files', 'red', attrs=['bold'])", "= path + \"/\" + vj tmp_pathing = pathing + \"/train/\" + vj", "\"mkdir \" + tmp_pathing cmd_2 = \"mkdir \" + tmp_pathings os.system(cmd_1) os.system(cmd_2) folders", "find incompatible files print colored('Finding files incompatible with utf8: ', 'green', attrs=['bold']) count_vector", "main(): init() # get the dataset path = \"20news-18828\" # Create new dataset", "\"1\")) os.makedirs(os.path.join(\"cross_valid/1\", \"train\")) os.makedirs(os.path.join(\"cross_valid/1\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"2\")) os.makedirs(os.path.join(\"cross_valid/2\", \"train\")) os.makedirs(os.path.join(\"cross_valid/2\", \"test\")) 
os.makedirs(os.path.join(\"cross_valid\", \"3\"))", "\" + tmp_pathing + \"/\" + str(i) os.system(cmd_train) else: cmd_test = \"cp \"", "incompatible files print colored('Finding files incompatible with utf8: ', 'green', attrs=['bold']) count_vector =", "try: count_vector.fit_transform(files.data[i:i + 1]) except UnicodeDecodeError: incompatible_files.append(files.filenames[i]) except ValueError: pass print colored(len(incompatible_files), 'yellow'),", "np def main(): init() # get the dataset path = \"20news-18828\" # Create", "-r \" + path + \" dataset_process\" print colored(\"Copying dataset into dataset_process\", 'blue',", "\"train\")) os.makedirs(os.path.join(\"cross_valid/3\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"4\")) os.makedirs(os.path.join(\"cross_valid/4\", \"train\")) os.makedirs(os.path.join(\"cross_valid/4\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"5\")) os.makedirs(os.path.join(\"cross_valid/5\", \"train\"))", "\"test\")) os.makedirs(os.path.join(\"cross_valid\", \"4\")) os.makedirs(os.path.join(\"cross_valid/4\", \"train\")) os.makedirs(os.path.join(\"cross_valid/4\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"5\")) os.makedirs(os.path.join(\"cross_valid/5\", \"train\")) os.makedirs(os.path.join(\"cross_valid/5\", \"test\"))", "utf8: ', 'green', attrs=['bold']) count_vector = sklearn.feature_extraction.text.CountVectorizer() files = sklearn.datasets.load_files(path) incompatible_files = []", "folder_indices = len(folders) -1 while i <= folder_indices: if i in indices: cmd_train", "os.listdir(path) for vj in V: tmp_path = path + \"/\" + vj tmp_pathing", "print colored(len(incompatible_files), 'yellow'), 'files found' # delete them if(len(incompatible_files) > 0): print colored('Converting", "dataset_process\" print colored(\"Copying dataset into dataset_process\", 'blue', attrs=['bold']) os.system(cmd) new_path = \"dataset_process\" change_incompatible_files(new_path)", "os.makedirs(os.path.join(\"cross_valid/5\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"6\")) os.makedirs(os.path.join(\"cross_valid/6\", \"train\")) os.makedirs(os.path.join(\"cross_valid/6\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"7\")) os.makedirs(os.path.join(\"cross_valid/7\", \"train\")) os.makedirs(os.path.join(\"cross_valid/7\",", "path + \"/\" + vj tmp_pathing = pathing + \"/train/\" + vj tmp_pathings", "os.makedirs(os.path.join(\"cross_valid/1\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"2\")) os.makedirs(os.path.join(\"cross_valid/2\", \"train\")) os.makedirs(os.path.join(\"cross_valid/2\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"3\")) os.makedirs(os.path.join(\"cross_valid/3\", \"train\")) os.makedirs(os.path.join(\"cross_valid/3\",", "sklearn.datasets.load_files(path) incompatible_files = [] for i in range(len(files.filenames)): try: count_vector.fit_transform(files.data[i:i + 1]) except", "UnicodeDecodeError: incompatible_files.append(files.filenames[i]) except ValueError: pass print colored(len(incompatible_files), 'yellow'), 'files found' # delete them", "cmd = \"iconv -f ISO-8859-1 \" + f + \" -t UTF-8 -o", "+ f + \" -t UTF-8 -o tmp\" cmdd = \"cp tmp \"", "for i in range(len(files.filenames)): try: count_vector.fit_transform(files.data[i:i + 1]) except UnicodeDecodeError: incompatible_files.append(files.filenames[i]) except ValueError:", "'blue', attrs=['bold']) print vj print colored(\"Made train and test for cross-valid dataset:\", 'blue',", "dataset path = \"20news-18828\" # Create new 
dataset for processing cmd = \"cp", "def change_incompatible_files(path): # find incompatible files print colored('Finding files incompatible with utf8: ',", "\" + path + \" dataset_process\" print colored(\"Copying dataset into dataset_process\", 'blue', attrs=['bold'])", "from colorama import init from termcolor import colored from random import sample import", "attrs=['bold']) print vj print colored(\"Made train and test for cross-valid dataset:\", 'blue', attrs=['bold'])", "sklearn.feature_extraction.text.CountVectorizer() files = sklearn.datasets.load_files(path) incompatible_files = [] for i in range(len(files.filenames)): try: count_vector.fit_transform(files.data[i:i", "os.makedirs(os.path.join(\"cross_valid/1\", \"train\")) os.makedirs(os.path.join(\"cross_valid/1\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"2\")) os.makedirs(os.path.join(\"cross_valid/2\", \"train\")) os.makedirs(os.path.join(\"cross_valid/2\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"3\")) os.makedirs(os.path.join(\"cross_valid/3\",", "dataset_process\", 'blue', attrs=['bold']) os.system(cmd) new_path = \"dataset_process\" change_incompatible_files(new_path) monte_carlo_cross(new_path) os.system(\"rm -r dataset_process\") def", "+ \" dataset_process\" print colored(\"Copying dataset into dataset_process\", 'blue', attrs=['bold']) os.system(cmd) new_path =", "folder_indices: if i in indices: cmd_train = \"cp \" + folders[i] + \"", "= os.listdir(path) for vj in V: tmp_path = path + \"/\" + vj", "= sample(range(0, len(folders)-1), train_split) i = 0 folder_indices = len(folders) -1 while i", "i = 0 folder_indices = len(folders) -1 while i <= folder_indices: if i", "incompatible_files: print colored(\"Changing file to UTF-8:\", 'red'), f cmd = \"iconv -f ISO-8859-1", "\"10\")) os.makedirs(os.path.join(\"cross_valid/10\", \"train\")) os.makedirs(os.path.join(\"cross_valid/10\", \"test\")) for f in range(1,11): pathing = \"cross_valid/%d\" %(f)", "ValueError: pass print colored(len(incompatible_files), 'yellow'), 'files found' # delete them if(len(incompatible_files) > 0):", "tmp_pathing cmd_2 = \"mkdir \" + tmp_pathings os.system(cmd_1) os.system(cmd_2) folders = glob.glob(os.path.join(tmp_path, '*'))", "\"/test/\" + vj cmd_1 = \"mkdir \" + tmp_pathing cmd_2 = \"mkdir \"", "i in range(len(files.filenames)): try: count_vector.fit_transform(files.data[i:i + 1]) except UnicodeDecodeError: incompatible_files.append(files.filenames[i]) except ValueError: pass", "os.makedirs(os.path.join(\"cross_valid\", \"2\")) os.makedirs(os.path.join(\"cross_valid/2\", \"train\")) os.makedirs(os.path.join(\"cross_valid/2\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"3\")) os.makedirs(os.path.join(\"cross_valid/3\", \"train\")) os.makedirs(os.path.join(\"cross_valid/3\", \"test\")) os.makedirs(os.path.join(\"cross_valid\",", "to UTF-8:\", 'red'), f cmd = \"iconv -f ISO-8859-1 \" + f +", "path + \" dataset_process\" print colored(\"Copying dataset into dataset_process\", 'blue', attrs=['bold']) os.system(cmd) new_path", "tmp_pathings + \"/\" + str(i) os.system(cmd_test) i += 1 print colored(\"Made train and", "= \"20news-18828\" # Create new dataset for processing cmd = \"cp -r \"", "\"2\")) os.makedirs(os.path.join(\"cross_valid/2\", \"train\")) os.makedirs(os.path.join(\"cross_valid/2\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"3\")) os.makedirs(os.path.join(\"cross_valid/3\", \"train\")) os.makedirs(os.path.join(\"cross_valid/3\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", 
\"4\"))", "them if(len(incompatible_files) > 0): print colored('Converting incompatible files', 'red', attrs=['bold']) for f in", "os.makedirs(os.path.join(\"cross_valid/10\", \"train\")) os.makedirs(os.path.join(\"cross_valid/10\", \"test\")) for f in range(1,11): pathing = \"cross_valid/%d\" %(f) V", "+ \" \" + tmp_pathings + \"/\" + str(i) os.system(cmd_test) i += 1", "incompatible files', 'red', attrs=['bold']) for f in incompatible_files: print colored(\"Changing file to UTF-8:\",", "dataset os.makedirs(os.path.join(\"cross_valid\", \"1\")) os.makedirs(os.path.join(\"cross_valid/1\", \"train\")) os.makedirs(os.path.join(\"cross_valid/1\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"2\")) os.makedirs(os.path.join(\"cross_valid/2\", \"train\")) os.makedirs(os.path.join(\"cross_valid/2\", \"test\"))", "import sys import os import glob import sklearn.datasets from colorama import init from", "while i <= folder_indices: if i in indices: cmd_train = \"cp \" +", "for:\", 'blue', attrs=['bold']) print vj print colored(\"Made train and test for cross-valid dataset:\",", "'blue', attrs=['bold']) print f def change_incompatible_files(path): # find incompatible files print colored('Finding files", "init() # get the dataset path = \"20news-18828\" # Create new dataset for", "\"20news-18828\" # Create new dataset for processing cmd = \"cp -r \" +", "\"train\")) os.makedirs(os.path.join(\"cross_valid/2\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"3\")) os.makedirs(os.path.join(\"cross_valid/3\", \"train\")) os.makedirs(os.path.join(\"cross_valid/3\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"4\")) os.makedirs(os.path.join(\"cross_valid/4\", \"train\"))", "\"test\")) os.makedirs(os.path.join(\"cross_valid\", \"7\")) os.makedirs(os.path.join(\"cross_valid/7\", \"train\")) os.makedirs(os.path.join(\"cross_valid/7\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"8\")) os.makedirs(os.path.join(\"cross_valid/8\", \"train\")) os.makedirs(os.path.join(\"cross_valid/8\", \"test\"))", "delete them if(len(incompatible_files) > 0): print colored('Converting incompatible files', 'red', attrs=['bold']) for f", "print colored('Finding files incompatible with utf8: ', 'green', attrs=['bold']) count_vector = sklearn.feature_extraction.text.CountVectorizer() files", "+ str(i) os.system(cmd_test) i += 1 print colored(\"Made train and test for:\", 'blue',", "files', 'red', attrs=['bold']) for f in incompatible_files: print colored(\"Changing file to UTF-8:\", 'red'),", "\"8\")) os.makedirs(os.path.join(\"cross_valid/8\", \"train\")) os.makedirs(os.path.join(\"cross_valid/8\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"9\")) os.makedirs(os.path.join(\"cross_valid/9\", \"train\")) os.makedirs(os.path.join(\"cross_valid/9\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"10\"))", "attrs=['bold']) print f def change_incompatible_files(path): # find incompatible files print colored('Finding files incompatible", "vj tmp_pathing = pathing + \"/train/\" + vj tmp_pathings = pathing + \"/test/\"", "in range(1,11): pathing = \"cross_valid/%d\" %(f) V = os.listdir(path) for vj in V:", "+ path + \" dataset_process\" print colored(\"Copying dataset into dataset_process\", 'blue', attrs=['bold']) os.system(cmd)", "cmd_2 = \"mkdir \" + tmp_pathings os.system(cmd_1) os.system(cmd_2) folders = glob.glob(os.path.join(tmp_path, '*')) train_split", "if(len(incompatible_files) > 0): print colored('Converting incompatible files', 'red', attrs=['bold']) for f in 
incompatible_files:", "vj print colored(\"Made train and test for cross-valid dataset:\", 'blue', attrs=['bold']) print f", "train_split) i = 0 folder_indices = len(folders) -1 while i <= folder_indices: if", "\"cp -r \" + path + \" dataset_process\" print colored(\"Copying dataset into dataset_process\",", "and test dataset os.makedirs(os.path.join(\"cross_valid\", \"1\")) os.makedirs(os.path.join(\"cross_valid/1\", \"train\")) os.makedirs(os.path.join(\"cross_valid/1\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"2\")) os.makedirs(os.path.join(\"cross_valid/2\", \"train\"))", "in range(len(files.filenames)): try: count_vector.fit_transform(files.data[i:i + 1]) except UnicodeDecodeError: incompatible_files.append(files.filenames[i]) except ValueError: pass print", "range(len(files.filenames)): try: count_vector.fit_transform(files.data[i:i + 1]) except UnicodeDecodeError: incompatible_files.append(files.filenames[i]) except ValueError: pass print colored(len(incompatible_files),", "print vj print colored(\"Made train and test for cross-valid dataset:\", 'blue', attrs=['bold']) print", "init from termcolor import colored from random import sample import random import numpy", "Carlo Cross Validation os.system(\"mkdir cross_valid\") # Copy path into train and test dataset", "\"test\")) os.makedirs(os.path.join(\"cross_valid\", \"2\")) os.makedirs(os.path.join(\"cross_valid/2\", \"train\")) os.makedirs(os.path.join(\"cross_valid/2\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"3\")) os.makedirs(os.path.join(\"cross_valid/3\", \"train\")) os.makedirs(os.path.join(\"cross_valid/3\", \"test\"))", "vj in V: tmp_path = path + \"/\" + vj tmp_pathing = pathing", "= 0 folder_indices = len(folders) -1 while i <= folder_indices: if i in", "in V: tmp_path = path + \"/\" + vj tmp_pathing = pathing +", "os.makedirs(os.path.join(\"cross_valid/9\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"10\")) os.makedirs(os.path.join(\"cross_valid/10\", \"train\")) os.makedirs(os.path.join(\"cross_valid/10\", \"test\")) for f in range(1,11): pathing", "tmp_pathings os.system(cmd_1) os.system(cmd_2) folders = glob.glob(os.path.join(tmp_path, '*')) train_split = int(round(len(folders) * 0.6)) indices", "f cmd = \"iconv -f ISO-8859-1 \" + f + \" -t UTF-8", "except UnicodeDecodeError: incompatible_files.append(files.filenames[i]) except ValueError: pass print colored(len(incompatible_files), 'yellow'), 'files found' # delete", "os.makedirs(os.path.join(\"cross_valid\", \"3\")) os.makedirs(os.path.join(\"cross_valid/3\", \"train\")) os.makedirs(os.path.join(\"cross_valid/3\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"4\")) os.makedirs(os.path.join(\"cross_valid/4\", \"train\")) os.makedirs(os.path.join(\"cross_valid/4\", \"test\")) os.makedirs(os.path.join(\"cross_valid\",", "[] for i in range(len(files.filenames)): try: count_vector.fit_transform(files.data[i:i + 1]) except UnicodeDecodeError: incompatible_files.append(files.filenames[i]) except", "pathing + \"/train/\" + vj tmp_pathings = pathing + \"/test/\" + vj cmd_1", "<NAME> import sys import os import glob import sklearn.datasets from colorama import init", "+ \"/\" + str(i) os.system(cmd_train) else: cmd_test = \"cp \" + folders[i] +", "change_incompatible_files(path): # find incompatible files print colored('Finding files incompatible with utf8: ', 'green',", "len(folders)-1), train_split) i = 0 folder_indices = len(folders) -1 while i <= folder_indices:", "termcolor import colored from random import sample import 
random import numpy as np", "sklearn.datasets from colorama import init from termcolor import colored from random import sample", "train_split = int(round(len(folders) * 0.6)) indices = sample(range(0, len(folders)-1), train_split) i = 0", "os.makedirs(os.path.join(\"cross_valid\", \"6\")) os.makedirs(os.path.join(\"cross_valid/6\", \"train\")) os.makedirs(os.path.join(\"cross_valid/6\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"7\")) os.makedirs(os.path.join(\"cross_valid/7\", \"train\")) os.makedirs(os.path.join(\"cross_valid/7\", \"test\")) os.makedirs(os.path.join(\"cross_valid\",", "= sklearn.feature_extraction.text.CountVectorizer() files = sklearn.datasets.load_files(path) incompatible_files = [] for i in range(len(files.filenames)): try:", "cmd_train = \"cp \" + folders[i] + \" \" + tmp_pathing + \"/\"", "count_vector = sklearn.feature_extraction.text.CountVectorizer() files = sklearn.datasets.load_files(path) incompatible_files = [] for i in range(len(files.filenames)):", "count_vector.fit_transform(files.data[i:i + 1]) except UnicodeDecodeError: incompatible_files.append(files.filenames[i]) except ValueError: pass print colored(len(incompatible_files), 'yellow'), 'files", "ISO-8859-1 \" + f + \" -t UTF-8 -o tmp\" cmdd = \"cp", "colorama import init from termcolor import colored from random import sample import random", "get the dataset path = \"20news-18828\" # Create new dataset for processing cmd", "\"train\")) os.makedirs(os.path.join(\"cross_valid/5\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"6\")) os.makedirs(os.path.join(\"cross_valid/6\", \"train\")) os.makedirs(os.path.join(\"cross_valid/6\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"7\")) os.makedirs(os.path.join(\"cross_valid/7\", \"train\"))", "os.system(cmd_2) folders = glob.glob(os.path.join(tmp_path, '*')) train_split = int(round(len(folders) * 0.6)) indices = sample(range(0,", "in incompatible_files: print colored(\"Changing file to UTF-8:\", 'red'), f cmd = \"iconv -f", "for cross-valid dataset:\", 'blue', attrs=['bold']) print f def change_incompatible_files(path): # find incompatible files", "\"6\")) os.makedirs(os.path.join(\"cross_valid/6\", \"train\")) os.makedirs(os.path.join(\"cross_valid/6\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"7\")) os.makedirs(os.path.join(\"cross_valid/7\", \"train\")) os.makedirs(os.path.join(\"cross_valid/7\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"8\"))", "os.system(cmd) new_path = \"dataset_process\" change_incompatible_files(new_path) monte_carlo_cross(new_path) os.system(\"rm -r dataset_process\") def monte_carlo_cross(path): # Monte", "+ folders[i] + \" \" + tmp_pathing + \"/\" + str(i) os.system(cmd_train) else:", "import sample import random import numpy as np def main(): init() # get", "str(i) os.system(cmd_test) i += 1 print colored(\"Made train and test for:\", 'blue', attrs=['bold'])", "test dataset os.makedirs(os.path.join(\"cross_valid\", \"1\")) os.makedirs(os.path.join(\"cross_valid/1\", \"train\")) os.makedirs(os.path.join(\"cross_valid/1\", \"test\")) os.makedirs(os.path.join(\"cross_valid\", \"2\")) os.makedirs(os.path.join(\"cross_valid/2\", \"train\")) os.makedirs(os.path.join(\"cross_valid/2\",", "for f in range(1,11): pathing = \"cross_valid/%d\" %(f) V = os.listdir(path) for vj" ]
[ "import os from .server import Server from .protoModule import ProtoModule __path__.append(os.path.join(os.path.dirname(__file__), 'comm')) __all__", ".server import Server from .protoModule import ProtoModule __path__.append(os.path.join(os.path.dirname(__file__), 'comm')) __all__ = ['Server', 'ProtoModule']", "<gh_stars>0 import os from .server import Server from .protoModule import ProtoModule __path__.append(os.path.join(os.path.dirname(__file__), 'comm'))", "os from .server import Server from .protoModule import ProtoModule __path__.append(os.path.join(os.path.dirname(__file__), 'comm')) __all__ =", "from .server import Server from .protoModule import ProtoModule __path__.append(os.path.join(os.path.dirname(__file__), 'comm')) __all__ = ['Server'," ]
[ "yield a single batched sample \"\"\" for b in self.dl: yield to_device(b, self.device)", "dl self.device = device def __iter__(self): \"\"\" Move dataloader to device and yield", "Device to load to :return (torch.device): Data loaded onto default device \"\"\" if", "shuffle_, num_workers=8, pin_memory=True ) device_dl = DeviceDataLoader(dl, device) return device_dl def predict_image(image, model,", "device \"\"\" def __init__(self, dl, device): self.dl = dl self.device = device def", "time, torch import torchvision.transforms as tt from torchvision.datasets import ImageFolder from torch.utils.data import", "device \"\"\" if torch.cuda.is_available(): return torch.device('cuda') else: return torch.device('cpu') def to_device(data, device): \"\"\"", ":param data(torch.tensor): Dataset to load * :param device(torch.device): Device to load to :return", "transforms) dl = DataLoader( ds, batch_size, shuffle = shuffle_, num_workers=8, pin_memory=True ) device_dl", "tt.Compose([tt.ToTensor()]) ds = ImageFolder(dataset+path, transforms) dl = DataLoader( ds, batch_size, shuffle = shuffle_,", "pixel/channel values * :param model(torch.nn.module): Model * :param classMap(dict): Mapped class values for", "to_device(b, self.device) def __len__(self): \"\"\" Number of batches \"\"\" return len(self.dl) def mem_report():", "* :param data(torch.tensor): Dataset to load * :param device(torch.device): Device to load to", "\"\"\" Loads a batch of data to the device * :param param(dict): Batch", ":param model(torch.nn.module): Model * :param classMap(dict): Mapped class values for prediction output *", "= DataLoader( ds, batch_size, shuffle = shuffle_, num_workers=8, pin_memory=True ) device_dl = DeviceDataLoader(dl,", "class DeviceDataLoader(): \"\"\" DeviceDataLoader Class ---------------------- Wraps and sends a pytorch dataloader to", "def __iter__(self): \"\"\" Move dataloader to device and yield a single batched sample", "data onto default device * :param data(torch.tensor): Dataset to load * :param device(torch.device):", "from torch.utils.data import DataLoader class DeviceDataLoader(): \"\"\" DeviceDataLoader Class ---------------------- Wraps and sends", "default device * :param data(torch.tensor): Dataset to load * :param device(torch.device): Device to", "GPUtil.getGPUs() for i, gpu in enumerate(GPUs): print(f'GPU {i:d} ... 
Mem Free: {gpu.memoryFree:.0f}MB /", "img(np.ndarray): Numpy array of pixel/channel values * :param model(torch.nn.module): Model * :param classMap(dict):", "mem_report(): \"\"\" Returns available device and device properties \"\"\" print(\"CPU RAM Free: \"", "{gpu.memoryFree:.0f}MB / {gpu.memoryTotal:.0f}MB | Utilization {gpu.memoryUtil*100:3.0f}%') def get_default_device(): \"\"\" Return current default device", "device * :param data(torch.tensor): Dataset to load * :param device(torch.device): Device to load", "data to the device * :param param(dict): Batch parameters * :param device(torch.device): Device", "Free: \" + humanize.naturalsize(psutil.virtual_memory().available)) GPUs = GPUtil.getGPUs() for i, gpu in enumerate(GPUs): print(f'GPU", "Return current default device \"\"\" if torch.cuda.is_available(): return torch.device('cuda') else: return torch.device('cpu') def", "Class prediction for the image \"\"\" X = to_device(image.unsqueeze(0), device) _, prediction =", "Mapped class values for prediction output * :param device(torch.device): Device to load data", "to load to :return (torch.device): Data loaded onto default device \"\"\" if isinstance(data,", "output * :param device(torch.device): Device to load data onto :return (str): Class prediction", "DeviceDataLoader(dl, device) return device_dl def predict_image(image, model, classMap, device): \"\"\" Predicts the class", "yield to_device(b, self.device) def __len__(self): \"\"\" Number of batches \"\"\" return len(self.dl) def", "to current device \"\"\" def __init__(self, dl, device): self.dl = dl self.device =", "i, gpu in enumerate(GPUs): print(f'GPU {i:d} ... Mem Free: {gpu.memoryFree:.0f}MB / {gpu.memoryTotal:.0f}MB |", "* :param device(torch.device): Device to load data onto :return (str): Class prediction for", "value in param.items()] transforms = tt.Compose([tt.ToTensor()]) ds = ImageFolder(dataset+path, transforms) dl = DataLoader(", "---------------------- Wraps and sends a pytorch dataloader to current device \"\"\" def __init__(self,", "data.to(device, non_blocking=True) def load_set(param, device, dataset): \"\"\" Loads a batch of data to", "DataLoader( ds, batch_size, shuffle = shuffle_, num_workers=8, pin_memory=True ) device_dl = DeviceDataLoader(dl, device)", "self.dl = dl self.device = device def __iter__(self): \"\"\" Move dataloader to device", "device_dl = DeviceDataLoader(dl, device) return device_dl def predict_image(image, model, classMap, device): \"\"\" Predicts", "import sys, humanize, psutil, GPUtil, time, torch import torchvision.transforms as tt from torchvision.datasets", "ds = ImageFolder(dataset+path, transforms) dl = DataLoader( ds, batch_size, shuffle = shuffle_, num_workers=8,", "DeviceDataLoader Class ---------------------- Wraps and sends a pytorch dataloader to current device \"\"\"", "of pixel/channel values * :param model(torch.nn.module): Model * :param classMap(dict): Mapped class values", "of batches \"\"\" return len(self.dl) def mem_report(): \"\"\" Returns available device and device", "from torchvision.datasets import ImageFolder from torch.utils.data import DataLoader class DeviceDataLoader(): \"\"\" DeviceDataLoader Class", "Loads a batch of data to the device * :param param(dict): Batch parameters", "a batch of data to the device * :param param(dict): Batch parameters *", "dataset(torch.tensor): Data to load :return (DeviceDataLoader): Batch data loaded onto default device \"\"\"", "a single image * :param img(np.ndarray): Numpy array of pixel/channel values * :param", "= 
device def __iter__(self): \"\"\" Move dataloader to device and yield a single", "parameters * :param device(torch.device): Device to load to * :param dataset(torch.tensor): Data to", "to :return (torch.device): Data loaded onto default device \"\"\" if isinstance(data, (list,tuple)): return", "def load_set(param, device, dataset): \"\"\" Loads a batch of data to the device", "to load * :param device(torch.device): Device to load to :return (torch.device): Data loaded", "single image * :param img(np.ndarray): Numpy array of pixel/channel values * :param model(torch.nn.module):", "batched sample \"\"\" for b in self.dl: yield to_device(b, self.device) def __len__(self): \"\"\"", "= DeviceDataLoader(dl, device) return device_dl def predict_image(image, model, classMap, device): \"\"\" Predicts the", "\"\"\" def __init__(self, dl, device): self.dl = dl self.device = device def __iter__(self):", "{gpu.memoryTotal:.0f}MB | Utilization {gpu.memoryUtil*100:3.0f}%') def get_default_device(): \"\"\" Return current default device \"\"\" if", "enumerate(GPUs): print(f'GPU {i:d} ... Mem Free: {gpu.memoryFree:.0f}MB / {gpu.memoryTotal:.0f}MB | Utilization {gpu.memoryUtil*100:3.0f}%') def", "Loads data onto default device * :param data(torch.tensor): Dataset to load * :param", ":return (str): Class prediction for the image \"\"\" X = to_device(image.unsqueeze(0), device) _,", "class of a single image * :param img(np.ndarray): Numpy array of pixel/channel values", "torch.utils.data import DataLoader class DeviceDataLoader(): \"\"\" DeviceDataLoader Class ---------------------- Wraps and sends a", ":param img(np.ndarray): Numpy array of pixel/channel values * :param model(torch.nn.module): Model * :param", "of data to the device * :param param(dict): Batch parameters * :param device(torch.device):", "__init__(self, dl, device): self.dl = dl self.device = device def __iter__(self): \"\"\" Move", "len(self.dl) def mem_report(): \"\"\" Returns available device and device properties \"\"\" print(\"CPU RAM", "torch.cuda.is_available(): return torch.device('cuda') else: return torch.device('cpu') def to_device(data, device): \"\"\" Loads data onto", "ds, batch_size, shuffle = shuffle_, num_workers=8, pin_memory=True ) device_dl = DeviceDataLoader(dl, device) return", "get_default_device(): \"\"\" Return current default device \"\"\" if torch.cuda.is_available(): return torch.device('cuda') else: return", "* :param device(torch.device): Device to load to * :param dataset(torch.tensor): Data to load", ":param dataset(torch.tensor): Data to load :return (DeviceDataLoader): Batch data loaded onto default device", "dataset): \"\"\" Loads a batch of data to the device * :param param(dict):", "for the image \"\"\" X = to_device(image.unsqueeze(0), device) _, prediction = torch.max(model(X), dim=1)", "path, shuffle_, batch_size = [value for key, value in param.items()] transforms = tt.Compose([tt.ToTensor()])", "= GPUtil.getGPUs() for i, gpu in enumerate(GPUs): print(f'GPU {i:d} ... 
Mem Free: {gpu.memoryFree:.0f}MB", "\"\"\" Loads data onto default device * :param data(torch.tensor): Dataset to load *", "pin_memory=True ) device_dl = DeviceDataLoader(dl, device) return device_dl def predict_image(image, model, classMap, device):", "properties \"\"\" print(\"CPU RAM Free: \" + humanize.naturalsize(psutil.virtual_memory().available)) GPUs = GPUtil.getGPUs() for i,", "num_workers=8, pin_memory=True ) device_dl = DeviceDataLoader(dl, device) return device_dl def predict_image(image, model, classMap,", "Numpy array of pixel/channel values * :param model(torch.nn.module): Model * :param classMap(dict): Mapped", "Class ---------------------- Wraps and sends a pytorch dataloader to current device \"\"\" def", "RAM Free: \" + humanize.naturalsize(psutil.virtual_memory().available)) GPUs = GPUtil.getGPUs() for i, gpu in enumerate(GPUs):", "(list,tuple)): return [to_device(x, device) for x in data] return data.to(device, non_blocking=True) def load_set(param,", "prediction output * :param device(torch.device): Device to load data onto :return (str): Class", "device \"\"\" if isinstance(data, (list,tuple)): return [to_device(x, device) for x in data] return", "Device to load data onto :return (str): Class prediction for the image \"\"\"", "Move dataloader to device and yield a single batched sample \"\"\" for b", "return torch.device('cpu') def to_device(data, device): \"\"\" Loads data onto default device * :param", "load :return (DeviceDataLoader): Batch data loaded onto default device \"\"\" path, shuffle_, batch_size", "and yield a single batched sample \"\"\" for b in self.dl: yield to_device(b,", "Mem Free: {gpu.memoryFree:.0f}MB / {gpu.memoryTotal:.0f}MB | Utilization {gpu.memoryUtil*100:3.0f}%') def get_default_device(): \"\"\" Return current", "\"\"\" Return current default device \"\"\" if torch.cuda.is_available(): return torch.device('cuda') else: return torch.device('cpu')", "[to_device(x, device) for x in data] return data.to(device, non_blocking=True) def load_set(param, device, dataset):", "transforms = tt.Compose([tt.ToTensor()]) ds = ImageFolder(dataset+path, transforms) dl = DataLoader( ds, batch_size, shuffle", "default device \"\"\" if isinstance(data, (list,tuple)): return [to_device(x, device) for x in data]", "\"\"\" Move dataloader to device and yield a single batched sample \"\"\" for", "\"\"\" path, shuffle_, batch_size = [value for key, value in param.items()] transforms =", "in param.items()] transforms = tt.Compose([tt.ToTensor()]) ds = ImageFolder(dataset+path, transforms) dl = DataLoader( ds,", "\"\"\" Predicts the class of a single image * :param img(np.ndarray): Numpy array", "default device \"\"\" if torch.cuda.is_available(): return torch.device('cuda') else: return torch.device('cpu') def to_device(data, device):", "Free: {gpu.memoryFree:.0f}MB / {gpu.memoryTotal:.0f}MB | Utilization {gpu.memoryUtil*100:3.0f}%') def get_default_device(): \"\"\" Return current default", "non_blocking=True) def load_set(param, device, dataset): \"\"\" Loads a batch of data to the", "torchvision.transforms as tt from torchvision.datasets import ImageFolder from torch.utils.data import DataLoader class DeviceDataLoader():", "data onto :return (str): Class prediction for the image \"\"\" X = to_device(image.unsqueeze(0),", ":return (torch.device): Data loaded onto default device \"\"\" if isinstance(data, (list,tuple)): return [to_device(x,", "GPUtil, time, torch import torchvision.transforms as tt from torchvision.datasets import ImageFolder from 
torch.utils.data", "... Mem Free: {gpu.memoryFree:.0f}MB / {gpu.memoryTotal:.0f}MB | Utilization {gpu.memoryUtil*100:3.0f}%') def get_default_device(): \"\"\" Return", ":param device(torch.device): Device to load to :return (torch.device): Data loaded onto default device", "device): \"\"\" Loads data onto default device * :param data(torch.tensor): Dataset to load", "device(torch.device): Device to load data onto :return (str): Class prediction for the image", "device(torch.device): Device to load to * :param dataset(torch.tensor): Data to load :return (DeviceDataLoader):", "to_device(data, device): \"\"\" Loads data onto default device * :param data(torch.tensor): Dataset to", ") device_dl = DeviceDataLoader(dl, device) return device_dl def predict_image(image, model, classMap, device): \"\"\"", "Data loaded onto default device \"\"\" if isinstance(data, (list,tuple)): return [to_device(x, device) for", "prediction for the image \"\"\" X = to_device(image.unsqueeze(0), device) _, prediction = torch.max(model(X),", "onto default device * :param data(torch.tensor): Dataset to load * :param device(torch.device): Device", "Data to load :return (DeviceDataLoader): Batch data loaded onto default device \"\"\" path,", "batch_size, shuffle = shuffle_, num_workers=8, pin_memory=True ) device_dl = DeviceDataLoader(dl, device) return device_dl", "classMap, device): \"\"\" Predicts the class of a single image * :param img(np.ndarray):", "Device to load to * :param dataset(torch.tensor): Data to load :return (DeviceDataLoader): Batch", "single batched sample \"\"\" for b in self.dl: yield to_device(b, self.device) def __len__(self):", "load * :param device(torch.device): Device to load to :return (torch.device): Data loaded onto", "import ImageFolder from torch.utils.data import DataLoader class DeviceDataLoader(): \"\"\" DeviceDataLoader Class ---------------------- Wraps", "the device * :param param(dict): Batch parameters * :param device(torch.device): Device to load", "dataloader to current device \"\"\" def __init__(self, dl, device): self.dl = dl self.device", "a single batched sample \"\"\" for b in self.dl: yield to_device(b, self.device) def", "onto default device \"\"\" if isinstance(data, (list,tuple)): return [to_device(x, device) for x in", "sys, humanize, psutil, GPUtil, time, torch import torchvision.transforms as tt from torchvision.datasets import", "def predict_image(image, model, classMap, device): \"\"\" Predicts the class of a single image", "def mem_report(): \"\"\" Returns available device and device properties \"\"\" print(\"CPU RAM Free:", "def __init__(self, dl, device): self.dl = dl self.device = device def __iter__(self): \"\"\"", "batches \"\"\" return len(self.dl) def mem_report(): \"\"\" Returns available device and device properties", "Utilization {gpu.memoryUtil*100:3.0f}%') def get_default_device(): \"\"\" Return current default device \"\"\" if torch.cuda.is_available(): return", "DataLoader class DeviceDataLoader(): \"\"\" DeviceDataLoader Class ---------------------- Wraps and sends a pytorch dataloader", "def __len__(self): \"\"\" Number of batches \"\"\" return len(self.dl) def mem_report(): \"\"\" Returns", "torch.device('cuda') else: return torch.device('cpu') def to_device(data, device): \"\"\" Loads data onto default device", "batch of data to the device * :param param(dict): Batch parameters * :param", "load data onto :return (str): Class prediction for the image \"\"\" X =", "import DataLoader class DeviceDataLoader(): \"\"\" DeviceDataLoader Class 
---------------------- Wraps and sends a pytorch", "model, classMap, device): \"\"\" Predicts the class of a single image * :param", "to load data onto :return (str): Class prediction for the image \"\"\" X", "DeviceDataLoader(): \"\"\" DeviceDataLoader Class ---------------------- Wraps and sends a pytorch dataloader to current", "current default device \"\"\" if torch.cuda.is_available(): return torch.device('cuda') else: return torch.device('cpu') def to_device(data,", "device(torch.device): Device to load to :return (torch.device): Data loaded onto default device \"\"\"", ":param device(torch.device): Device to load data onto :return (str): Class prediction for the", "| Utilization {gpu.memoryUtil*100:3.0f}%') def get_default_device(): \"\"\" Return current default device \"\"\" if torch.cuda.is_available():", "device, dataset): \"\"\" Loads a batch of data to the device * :param", "key, value in param.items()] transforms = tt.Compose([tt.ToTensor()]) ds = ImageFolder(dataset+path, transforms) dl =", "device and yield a single batched sample \"\"\" for b in self.dl: yield", "gpu in enumerate(GPUs): print(f'GPU {i:d} ... Mem Free: {gpu.memoryFree:.0f}MB / {gpu.memoryTotal:.0f}MB | Utilization", "if torch.cuda.is_available(): return torch.device('cuda') else: return torch.device('cpu') def to_device(data, device): \"\"\" Loads data", "shuffle_, batch_size = [value for key, value in param.items()] transforms = tt.Compose([tt.ToTensor()]) ds", "the class of a single image * :param img(np.ndarray): Numpy array of pixel/channel", "Dataset to load * :param device(torch.device): Device to load to :return (torch.device): Data", "sends a pytorch dataloader to current device \"\"\" def __init__(self, dl, device): self.dl", "dl, device): self.dl = dl self.device = device def __iter__(self): \"\"\" Move dataloader", ":return (DeviceDataLoader): Batch data loaded onto default device \"\"\" path, shuffle_, batch_size =", ":param param(dict): Batch parameters * :param device(torch.device): Device to load to * :param", "device def __iter__(self): \"\"\" Move dataloader to device and yield a single batched", "return len(self.dl) def mem_report(): \"\"\" Returns available device and device properties \"\"\" print(\"CPU", "device_dl def predict_image(image, model, classMap, device): \"\"\" Predicts the class of a single", "else: return torch.device('cpu') def to_device(data, device): \"\"\" Loads data onto default device *", "x in data] return data.to(device, non_blocking=True) def load_set(param, device, dataset): \"\"\" Loads a", "param.items()] transforms = tt.Compose([tt.ToTensor()]) ds = ImageFolder(dataset+path, transforms) dl = DataLoader( ds, batch_size,", "loaded onto default device \"\"\" if isinstance(data, (list,tuple)): return [to_device(x, device) for x", "image \"\"\" X = to_device(image.unsqueeze(0), device) _, prediction = torch.max(model(X), dim=1) return classMap[prediction[0].item()]", "as tt from torchvision.datasets import ImageFolder from torch.utils.data import DataLoader class DeviceDataLoader(): \"\"\"", "array of pixel/channel values * :param model(torch.nn.module): Model * :param classMap(dict): Mapped class", "__iter__(self): \"\"\" Move dataloader to device and yield a single batched sample \"\"\"", "GPUs = GPUtil.getGPUs() for i, gpu in enumerate(GPUs): print(f'GPU {i:d} ... 
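# A minimal sketch, assuming only the helpers defined above: to_device recurses
# into lists/tuples, so an (inputs, labels) pair moves to the device as one unit.
# The tensor shapes and the _demo_to_device name are illustrative assumptions.
def _demo_to_device():
    device = get_default_device()
    images = torch.randn(4, 3, 32, 32)       # fake batch of four RGB 32x32 images
    labels = torch.tensor([0, 1, 2, 3])       # fake labels
    images_d, labels_d = to_device((images, labels), device)
    print(images_d.device, labels_d.device)   # both tensors report the same device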
def load_set(param, device, dataset):
    """
    Loads a batch of data to the device

    * :param param(dict): Batch parameters (path, shuffle flag, batch size)
    * :param device(torch.device): Device to load to
    * :param dataset(str): Root directory of the dataset
    :return (DeviceDataLoader): Dataloader that yields batches on the device
    """
    # Relies on dict insertion order: values must be (path, shuffle, batch size)
    path, shuffle_, batch_size = [value for key, value in param.items()]
    transforms = tt.Compose([tt.ToTensor()])
    ds = ImageFolder(dataset + path, transforms)
    dl = DataLoader(
        ds, batch_size, shuffle=shuffle_, num_workers=8, pin_memory=True
    )
    device_dl = DeviceDataLoader(dl, device)
    return device_dl


def predict_image(image, model, classMap, device):
    """
    Predicts the class of a single image

    * :param image(torch.tensor): Tensor of pixel/channel values (C, H, W)
    * :param model(torch.nn.Module): Trained model
    * :param classMap(dict): Mapped class values for prediction output
    * :param device(torch.device): Device to load data onto
    :return (str): Class prediction for the image
    """
    X = to_device(image.unsqueeze(0), device)
    _, prediction = torch.max(model(X), dim=1)
    return classMap[prediction[0].item()]
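# A minimal end-to-end sketch, assuming an ImageFolder-style dataset on disk:
# the directory layout ("./data/train"), the parameter dict, the class mapping,
# and the untrained placeholder model are all illustrative assumptions; only the
# call signatures come from the functions above. Note that load_set unpacks the
# dict values positionally, so keys must be inserted in path/shuffle/batch order.
if __name__ == "__main__":
    device = get_default_device()
    mem_report()

    params = {"path": "/train", "shuffle": True, "batch_size": 32}
    train_dl = load_set(params, device, "./data")   # reads ImageFolder at ./data/train

    class_map = {0: "cat", 1: "dog"}                # illustrative label mapping
    # Untrained placeholder model that accepts any 3-channel image size
    model = torch.nn.Sequential(
        torch.nn.AdaptiveAvgPool2d(1),
        torch.nn.Flatten(),
        torch.nn.Linear(3, len(class_map)),
    ).to(device)

    for images, labels in train_dl:                 # batches arrive already on `device`
        print(predict_image(images[0], model, class_map, device))
        break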
[ "as a list of tuples, (Position, Ref, Alt) for now. # Mutations will", "= j def process_all(self): \"\"\"Do everything, return everything\"\"\" # Travel to the end", "Any invalidation errors that flag this variant as successfully extracted, # but not", "# after the insertion # For deletions, the position is the position on", "CIGAR operations on the read (query) https://pysam.readthedocs.io/en/latest/api.html#pysam.AlignedSegment.cigartuples https://drive5.com/usearch/manual/cigar.html https://samtools.github.io/hts-specs/SAMv1.pdf Op Code Description -----------------------------------------------------------------------------------------", "a list of CIGAR operations on the read (query) https://pysam.readthedocs.io/en/latest/api.html#pysam.AlignedSegment.cigartuples https://drive5.com/usearch/manual/cigar.html https://samtools.github.io/hts-specs/SAMv1.pdf Op", "letters. USEARCH can read CIGAR strings using this operation, but does not generate", "not generate them. X BAM_CDIFF 8 Alignment column containing a mismatch, i.e. two", "of CIGAR operations on the read (query) https://pysam.readthedocs.io/en/latest/api.html#pysam.AlignedSegment.cigartuples https://drive5.com/usearch/manual/cigar.html https://samtools.github.io/hts-specs/SAMv1.pdf Op Code Description", "than X's and ='s (see below). I BAM_CINS 1 Insertion (gap in the", "at the position it is mapped onto the reference # using read.query_alignment_start self.read_i", "- Index on the reference of where we want to crawl to \"\"\"", "and the same indel type j = i while j < len(self.mutation_str) and", "flexible and SNP-tolerant manner Modified and heavily trimmed down version of read_extractor.py (v0.1.0)", "back to 1-indexed, starting at the genome start pos = pos + 1", "soft clipping, where the full-length query sequence is given (field 10 in the", "generate them. X BAM_CDIFF 8 Alignment column containing a mismatch, i.e. two different", "alt)) continue # Check ahead for adjacent positions and the same indel type", "position on the reference # after the insertion # For deletions, the position", "X's and ='s (see below). I BAM_CINS 1 Insertion (gap in the query", "[] # Read data from the pysam.AlignedSegment object into python variables self.load_read() def", "later # to serialize into one big string self.mutation_str = [] # Any", "Use the CIGAR operations and other stats to stay on the same \"aligned\"", "identical letters. USEARCH can read CIGAR strings using this operation, but does not", "= reverse_complement(self.read_seq) # Don't try to do anything else if this read is", "0 Match (alignment column containing two letters). This could contain two different letters", "object into Python \"\"\" # Nucleotide sequence of the read self.read_seq = self.read.get_forward_sequence()", "( query_name, pos, \"\".join([m[2] for m in adj_muts]), \"\".join([m[3] for m in adj_muts]),", "Insertion (gap in the query sequence). D BAM_CDEL 2 Deletion (gap in the", "adjacent indels adj_muts = self.mutation_str[i:j] # Combine bases, but keep first position and", "(Position, Ref, Alt) for now. # Mutations will be individually serialized then joined", "with hard clipping, where only the aligned segment of the query sequences is", "do not appear in a local alignment. 
H BAM_CHARD_CLIP 5 Segment of the", "https://samtools.github.io/hts-specs/SAMv1.pdf Op Code Description ----------------------------------------------------------------------------------------- M BAM_CMATCH 0 Match (alignment column containing two", "code or a base mismatch for a # generic 0 OP code (", "does not appear in the alignment. This is used with soft clipping, where", "| No | | = | 7 | Yes | Yes | |", "read # using read.reference_start self.ref_i = self.read.reference_start # Start the read at the", "0 # Start the reference at the position it is mapped onto the", "| No | | H | 5 | No | No | |", "== 2 or op == 3: # Add deletion information to mutation string", "while self.ref_i < destination: # If we've reached the end of the CIGAR", "skip adding it if either the ref or the alt # is an", "# Start the reference at the position it is mapped onto the read", "m in adj_muts]), \"\".join([m[3] for m in adj_muts]), ) ) # Skip ahead", "mismatch, i.e. two different letters. USEARCH can read CIGAR strings using this operation,", "\"\".join([m[3] for m in adj_muts]), ) ) # Skip ahead to the end", "else: # Do nothing pass # Always iterate the CIGAR index self.cigar_i +=", "the ref or the alt # is an ambiguous base (N) # This", "= self.read.get_forward_sequence() # If reverse complement, flip the sequence and the quality scores", "First element of the tuple is the operation code # Second element of", "deletions, the position is the position on the reference # that was deleted", "contain two different letters (mismatch) or two identical letters. USEARCH generates CIGAR strings", "the quality scores if self.read.is_reverse: self.read_seq = reverse_complement(self.read_seq) # Don't try to do", "other stats to stay on the same \"aligned\" base (as if we did", "import numpy as np import pandas as pd from collections import defaultdict from", "Don't throw an error once we reach the end self.crawl_to(len(self.reference_seq)) self.get_dna_snps() return self.dna_snps", "array # Later when writing to disk we'll serialize this array as a", "we did a multiple sequence alignment on the read and ref) Parameters ----------", "Yes | Yes | | I | 1 | Yes | No |", "the reference S BAM_CSOFT_CLIP 4 Segment of the query sequence that does not", "| 4 | Yes | No | | H | 5 | No", "long and add it to the # master operations list self.cigar_ops.extend([op_group[0],] * op_group[1])", "SAM record) . In this case, S operations specify segments at the start", "= 0 def crawl_to(self, destination): \"\"\"Iterate (consume bases) through both the read and", "pysam.AlignedSegment \"\"\" self.read = read # Build our own mutation string to store", "ref or the alt # is an ambiguous base (N) # This is", "For insertions, the position is the position on the reference # after the", "1 # Get adjacent indels adj_muts = self.mutation_str[i:j] # Combine bases, but keep", "Soft Clip elif op == 1 or op == 4: # Add insertion", "the query sequence that does not appear in the alignment. 
This is used", "the reference # that was deleted # Store it as a list of", "to crawl to \"\"\" while self.ref_i < destination: # If we've reached the", "of reads if using paired-end sequencing Parameters ---------- read: pysam.AlignedSegment \"\"\" self.read =", "individually serialized then joined by ';' later # to serialize into one big", "the reference (0-indexed) # For insertions, the position is the position on the", "for a read (pysam.AlignedSegment) or a pair of reads if using paired-end sequencing", "\"\", self.read_seq[self.read_i]) ) self.read_i += 1 # Deletion or Skip elif op ==", "not alt) ) # New position must be adjacent to the previous one", "2 | No | Yes | | N | 3 | No |", "record) . In this case, S operations specify segments at the start and/", "position (self.reference_seq[self.ref_i] != \"X\") ): # Add substitution information to mutation string self.mutation_str.append(", "+= 1 # Actually, skip adding it if either the ref or the", "for SNPs # If the OP code is 0, then we have to", "and ='s (see below). I BAM_CINS 1 Insertion (gap in the query sequence).", "| I | 1 | Yes | No | | D | 2", "hard clipping, where only the aligned segment of the query sequences is given", "== 8: # Check for SNPs # If the OP code is 0,", "if op == 0 or op == 7 or op == 8: #", "the aligned segment of the query sequences is given (field 10 in the", ") ) and # If the reference has an X as the base,", "is relative to the reference (0-indexed) # For insertions, the position is the", "MATCH - can be match or mismatch (SNP) if op == 0 or", "# Join adjacent indels self.dna_snps = [] i = 0 while i <", "? | --------------------------------------------------- \"\"\" # MATCH - can be match or mismatch (SNP)", "+= 1 # END WHILE def get_dna_snps(self): \"\"\"Store list of NT SNPs/indels\"\"\" #", "scripts.util import translate, reverse_complement class ReadExtractor: \"\"\"Extract variable regions from a pysam AlignedSegment", "Alt) for now. # Mutations will be individually serialized then joined by ';'", "0 or op == 7 or op == 8: # Check for SNPs", "| Yes | | X | 8 | Yes | Yes | |", "self.mutation_str[j][2] and not ref) # Both deletions or (not self.mutation_str[j][3] and not alt)", "# Hard Clip, Padding else: # Do nothing pass # Always iterate the", "pos + 1 # If it's a SNP, then add and continue if", "7 | Yes | Yes | | X | 8 | Yes |", "appear in a local alignment. H BAM_CHARD_CLIP 5 Segment of the query sequence", "( # Check for a mismatch OP code or a base mismatch for", "master operations list self.cigar_ops.extend([op_group[0],] * op_group[1]) # Reset the cigar index self.cigar_i =", "ReadExtractor.RefSeq \"\"\"Expand CIGAR tuples to a list of CIGAR operations on the read", "alignment. This is used with soft clipping, where the full-length query sequence is", "| No | Yes | | S | 4 | Yes | No", "| M | 0 | Yes | Yes | | I | 1", "# For deletions, the position is the position on the reference # that", "the sequence and the quality scores if self.read.is_reverse: self.read_seq = reverse_complement(self.read_seq) # Don't", "for m in adj_muts]), ) ) # Skip ahead to the end of", "| 9 | ? | ? 
| --------------------------------------------------- \"\"\" # MATCH - can", "is unmapped if self.read.is_unmapped: return # Get the reference sequence self.reference_seq = ReadExtractor.RefSeq", "stats to stay on the same \"aligned\" base (as if we did a", "# If reverse complement, flip the sequence and the quality scores if self.read.is_reverse:", "to mutation string self.mutation_str.append( ( self.read.query_name, self.ref_i, self.reference_seq[self.ref_i], self.read_seq[self.read_i], ) ) self.read_i +=", "destination: # If we've reached the end of the CIGAR string, break out", "if ref and alt: i += 1 # Actually, skip adding it if", "self.cigar_i += 1 # END WHILE def get_dna_snps(self): \"\"\"Store list of NT SNPs/indels\"\"\"", "# Always iterate the CIGAR index self.cigar_i += 1 # END WHILE def", "both the read and the reference Use the CIGAR operations and other stats", "reference S BAM_CSOFT_CLIP 4 Segment of the query sequence that does not appear", "or op == 8: # Check for SNPs # If the OP code", "the position back to 1-indexed, starting at the genome start pos = pos", "Yes | | X | 8 | Yes | Yes | | B", "manner Modified and heavily trimmed down version of read_extractor.py (v0.1.0) from the variant_extractor", "to the end of the read # so that we can collect additional", "5 Segment of the query sequence that does not appear in the alignment.", "if ( # Check for a mismatch OP code or a base mismatch", "for a mismatch OP code or a base mismatch for a # generic", "so that we can collect additional mutations (if they exist) # Don't throw", "No | No | | P | 6 | No | No |", "data in from the pysam.AlignedSegment object into Python \"\"\" # Nucleotide sequence of", "else if this read is unmapped if self.read.is_unmapped: return # Get the reference", "| | X | 8 | Yes | Yes | | B |", "# END WHILE def get_dna_snps(self): \"\"\"Store list of NT SNPs/indels\"\"\" # Join adjacent", "OP code of 8, then we know there's a mismatch if ( #", "full-length query sequence is given (field 10 in the SAM record) . In", "from scripts.util import translate, reverse_complement class ReadExtractor: \"\"\"Extract variable regions from a pysam", "variable regions from an aligned segment, in a flexible and SNP-tolerant manner Modified", "for adjacent positions and the same indel type j = i while j", "= self.mutation_str[i:j] # Combine bases, but keep first position and type self.dna_snps.append( (", "end of the adjacent mutations i = j def process_all(self): \"\"\"Do everything, return", "and add it to the # master operations list self.cigar_ops.extend([op_group[0],] * op_group[1]) #", "it is mapped onto the read # using read.reference_start self.ref_i = self.read.reference_start #", "read and ref) Parameters ---------- destination: int - Index on the reference of", "at the start and/ or end of the query that do not appear", "7 Alignment column containing two identical letters. USEARCH can read CIGAR strings using", "self.load_read() def load_read(self): \"\"\"Load data in from the pysam.AlignedSegment object into Python \"\"\"", "# Format: Position:Ref:Alt;... # Where position is relative to the reference (0-indexed) #", "a base mismatch for a # generic 0 OP code ( (op ==", "at the start and/or end of the query that do not appear in", "aligned segment of the query sequences is given (field 10 in the SAM", "generic 0 OP code ( (op == 8) or ( op == 0", "query sequence that does not appear in the alignment. 
This is used with", "+= 1 # Deletion or Skip elif op == 2 or op ==", "Always iterate the CIGAR index self.cigar_i += 1 # END WHILE def get_dna_snps(self):", "Consume Read | Consume Reference | --------------------------------------------------- | M | 0 | Yes", "try to do anything else if this read is unmapped if self.read.is_unmapped: return", "tuple is the number of operations # Create a new list [# of", "or op == 7 or op == 8: # Check for SNPs #", "self.read_seq[self.read_i], ) ) self.read_i += 1 self.ref_i += 1 # Insertion or Soft", "list of tuples, (Position, Ref, Alt) for now. # Mutations will be individually", "case, H operations specify segments at the start and/or end of the query", "# First element of the tuple is the operation code # Second element", "https://samtools.github.io/hts-specs/SAMv1.pdf --------------------------------------------------- | Op | Code | Consume Read | Consume Reference |", "read (pysam.AlignedSegment) or a pair of reads if using paired-end sequencing Parameters ----------", "will be stored in this array # Later when writing to disk we'll", "This is useless data bloat and should be removed as # early as", "or ( op == 0 and self.read_seq[self.read_i] != self.reference_seq[self.ref_i] ) ) and #", "Check for SNPs # If the OP code is 0, then we have", "using this operation, but does not generate them. B BAM_CBACK 9 \"\"\" self.cigar_ops", "reverse complement, flip the sequence and the quality scores if self.read.is_reverse: self.read_seq =", "H operations specify segments at the start and/or end of the query that", "down version of read_extractor.py (v0.1.0) from the variant_extractor project Author: <NAME> - Vector", "column containing a mismatch, i.e. two different letters. USEARCH can read CIGAR strings", "be stored in this array # Later when writing to disk we'll serialize", "element of the tuple is the operation code # Second element of the", "== 0 and self.read_seq[self.read_i] != self.reference_seq[self.ref_i] ) ) and # If the reference", "sequence self.reference_seq = ReadExtractor.RefSeq \"\"\"Expand CIGAR tuples to a list of CIGAR operations", "If reverse complement, flip the sequence and the quality scores if self.read.is_reverse: self.read_seq", "1 # If it's a SNP, then add and continue if ref and", "the position is the position on the reference # after the insertion #", "the extactor object for a read (pysam.AlignedSegment) or a pair of reads if", "i = j def process_all(self): \"\"\"Do everything, return everything\"\"\" # Travel to the", "of where we want to crawl to \"\"\" while self.ref_i < destination: #", "given (field 10 in the SAM record) . In this case, S operations", "8 Alignment column containing a mismatch, i.e. two different letters. USEARCH can read", "insertion information to mutation string self.mutation_str.append( (self.read.query_name, self.ref_i, \"\", self.read_seq[self.read_i]) ) self.read_i +=", "np import pandas as pd from collections import defaultdict from scripts.util import translate,", "could contain two different letters (mismatch) or two identical letters. 
USEARCH generates CIGAR", "# that was deleted # Store it as a list of tuples, (Position,", "= i while j < len(self.mutation_str) and ( # Both insertions ( (not", "# Get adjacent indels adj_muts = self.mutation_str[i:j] # Combine bases, but keep first", "into one big string self.mutation_str = [] # Any invalidation errors that flag", "of the read self.read_seq = self.read.get_forward_sequence() # If reverse complement, flip the sequence", "destination): \"\"\"Iterate (consume bases) through both the read and the reference Use the", "\"\"\"Load data in from the pysam.AlignedSegment object into Python \"\"\" # Nucleotide sequence", "Consume Reference | --------------------------------------------------- | M | 0 | Yes | Yes |", "tuples to a list of CIGAR operations on the read (query) https://pysam.readthedocs.io/en/latest/api.html#pysam.AlignedSegment.cigartuples https://drive5.com/usearch/manual/cigar.html", "position must be adjacent to the previous one and self.mutation_str[j][1] == int(pos -", "| N | 3 | No | Yes | | S | 4", "read): \"\"\"Build the extactor object for a read (pysam.AlignedSegment) or a pair of", "used with soft clipping, where the full-length query sequence is given (field 10", "CIGAR strings using this operation, but does not generate them. B BAM_CBACK 9", "data from the pysam.AlignedSegment object into python variables self.load_read() def load_read(self): \"\"\"Load data", "read self.read_seq = self.read.get_forward_sequence() # If reverse complement, flip the sequence and the", "below). I BAM_CINS 1 Insertion (gap in the query sequence). D BAM_CDEL 2", "onto the reference # using read.query_alignment_start self.read_i = 0 def crawl_to(self, destination): \"\"\"Iterate", "substitution information to mutation string self.mutation_str.append( ( self.read.query_name, self.ref_i, self.reference_seq[self.ref_i], self.read_seq[self.read_i], ) )", "<reponame>a13xk13m/covidcg # coding: utf-8 \"\"\"Extract variable regions from an aligned segment, in a", "X as the base, then # ignore any SNPs at this position (self.reference_seq[self.ref_i]", "Vector Engineering Team (<EMAIL>) \"\"\" import numpy as np import pandas as pd", "sequence is given (field 10 in the SAM record) . In this case,", ") self.read_i += 1 self.ref_i += 1 # Insertion or Soft Clip elif", "them. X BAM_CDIFF 8 Alignment column containing a mismatch, i.e. two different letters.", "is the position on the reference # that was deleted # Store it", "deletion from padded reference) = BAM_CEQUAL 7 Alignment column containing two identical letters.", "| | B | 9 | ? | ? 
| --------------------------------------------------- \"\"\" #", "Second element of the tuple is the number of operations # Create a", "self.cigar_ops = [] for op_group in self.read.cigartuples: # First element of the tuple", "# Check for SNPs # If the OP code is 0, then we", "j def process_all(self): \"\"\"Do everything, return everything\"\"\" # Travel to the end of", "the full-length query sequence is given (field 10 in the SAM record) .", "- can be match or mismatch (SNP) if op == 0 or op", "(SNP) if op == 0 or op == 7 or op == 8:", "see if there's a mismatch # If bowtie2 gave us the OP code", "SNPs/indels\"\"\" # Join adjacent indels self.dna_snps = [] i = 0 while i", "| Yes | | N | 3 | No | Yes | |", "on the reference of where we want to crawl to \"\"\" while self.ref_i", "| Yes | No | | D | 2 | No | Yes", "Yes | | S | 4 | Yes | No | | H", "# mut is a tuple: (Position, Ref, Alt) # Offset the position back", "in self.read.cigartuples: # First element of the tuple is the operation code #", "# Read data from the pysam.AlignedSegment object into python variables self.load_read() def load_read(self):", "i = 0 while i < len(self.mutation_str): (query_name, pos, ref, alt) = self.mutation_str[i]", "a local alignment. H BAM_CHARD_CLIP 5 Segment of the query sequence that does", "data bloat and should be removed as # early as possible if alt", "the position on the reference # after the insertion # For deletions, the", "| | N | 3 | No | Yes | | S |", "letters (mismatch) or two identical letters. USEARCH generates CIGAR strings containing Ms rather", "or two identical letters. USEARCH generates CIGAR strings containing Ms rather than X's", "N | 3 | No | Yes | | S | 4 |", "reference has an X as the base, then # ignore any SNPs at", "operations] long and add it to the # master operations list self.cigar_ops.extend([op_group[0],] *", "2 or op == 3: # Add deletion information to mutation string self.mutation_str.append(", "| P | 6 | No | No | | = | 7", "in the SAM record. P BAM_CPAD 6 padding (silent deletion from padded reference)", "10 in the SAM record) . In this case, S operations specify segments", "S | 4 | Yes | No | | H | 5 |", "int(pos - 1 + (j - i)) ): j += 1 # Get", "or op == 4: # Add insertion information to mutation string self.mutation_str.append( (self.read.query_name,", "an ambiguous base (N) # This is useless data bloat and should be", "| | P | 6 | No | No | | = |", "adjacent mutations i = j def process_all(self): \"\"\"Do everything, return everything\"\"\" # Travel", "from collections import defaultdict from scripts.util import translate, reverse_complement class ReadExtractor: \"\"\"Extract variable", "New position must be adjacent to the previous one and self.mutation_str[j][1] == int(pos", "read is unmapped if self.read.is_unmapped: return # Get the reference sequence self.reference_seq =", "https://pysam.readthedocs.io/en/latest/api.html#pysam.AlignedSegment.cigartuples https://drive5.com/usearch/manual/cigar.html https://samtools.github.io/hts-specs/SAMv1.pdf Op Code Description ----------------------------------------------------------------------------------------- M BAM_CMATCH 0 Match (alignment column", "# Both deletions or (not self.mutation_str[j][3] and not alt) ) # New position", "| Yes | Yes | | X | 8 | Yes | Yes", "the SAM record. 
P BAM_CPAD 6 padding (silent deletion from padded reference) =", "Where position is relative to the reference (0-indexed) # For insertions, the position", "a pysam AlignedSegment \"\"\" RefSeq = \"\" def __init__(self, read): \"\"\"Build the extactor", "index self.cigar_i += 1 # END WHILE def get_dna_snps(self): \"\"\"Store list of NT", "information to mutation string self.mutation_str.append( ( self.read.query_name, self.ref_i, self.reference_seq[self.ref_i], \"\", ) ) self.ref_i", "<NAME> - Vector Engineering Team (<EMAIL>) \"\"\" import numpy as np import pandas", "clipping, where the full-length query sequence is given (field 10 in the SAM", "query that do not appear in the SAM record. P BAM_CPAD 6 padding", "query sequences is given (field 10 in the SAM record). In this case,", "they exist) # Don't throw an error once we reach the end self.crawl_to(len(self.reference_seq))", "| 1 | Yes | No | | D | 2 | No", "Combine bases, but keep first position and type self.dna_snps.append( ( query_name, pos, \"\".join([m[2]", "pass # Always iterate the CIGAR index self.cigar_i += 1 # END WHILE", "self.read.get_forward_sequence() # If reverse complement, flip the sequence and the quality scores if", "BAM_CEQUAL 7 Alignment column containing two identical letters. USEARCH can read CIGAR strings", "S BAM_CSOFT_CLIP 4 Segment of the query sequence that does not appear in", "a semicolon-delimited string self.invalid_errors = [] # Store SNPs self.dna_snps = [] #", "pos, ref, alt) = self.mutation_str[i] # mut is a tuple: (Position, Ref, Alt)", "# If the reference has an X as the base, then # ignore", "Yes | No | | D | 2 | No | Yes |", "there's a mismatch if ( # Check for a mismatch OP code or", "segments at the start and/or end of the query that do not appear", "sequence). D BAM_CDEL 2 Deletion (gap in the target sequence). N BAM_CREF_SKIP 3", "# Any invalidation errors that flag this variant as successfully extracted, # but", "and the reference Use the CIGAR operations and other stats to stay on", "# Since both the CIGAR and MD string don't fit our needs #", "where the full-length query sequence is given (field 10 in the SAM record)", "== 4: # Add insertion information to mutation string self.mutation_str.append( (self.read.query_name, self.ref_i, \"\",", "+= 1 # Insertion or Soft Clip elif op == 1 or op", "pos = pos + 1 # If it's a SNP, then add and", "type j = i while j < len(self.mutation_str) and ( # Both insertions", "9 | ? | ? | --------------------------------------------------- \"\"\" # MATCH - can be", "(as if we did a multiple sequence alignment on the read and ref)", "mapped onto the read # using read.reference_start self.ref_i = self.read.reference_start # Start the", "# If the OP code is 0, then we have to check both", "it is mapped onto the reference # using read.query_alignment_start self.read_i = 0 def", "add it to the # master operations list self.cigar_ops.extend([op_group[0],] * op_group[1]) # Reset", "# Don't throw an error once we reach the end self.crawl_to(len(self.reference_seq)) self.get_dna_snps() return", "is given (field 10 in the SAM record). In this case, H operations", "CIGAR strings containing Ms rather than X's and ='s (see below). I BAM_CINS", "self.read_i += 1 self.ref_i += 1 # Insertion or Soft Clip elif op", "we want to crawl to \"\"\" while self.ref_i < destination: # If we've", "of the query sequence that does not appear in the alignment. This is", "the target sequence). 
N BAM_CREF_SKIP 3 skipped region from the reference S BAM_CSOFT_CLIP", "Yes | | N | 3 | No | Yes | | S", "successfully extracted, # but not passing filters, will be stored in this array", "== 1 or op == 4: # Add insertion information to mutation string", "self.ref_i += 1 # Hard Clip, Padding else: # Do nothing pass #", "1 # Hard Clip, Padding else: # Do nothing pass # Always iterate", "# so that we can collect additional mutations (if they exist) # Don't", "in the target sequence). N BAM_CREF_SKIP 3 skipped region from the reference S", "(see below). I BAM_CINS 1 Insertion (gap in the query sequence). D BAM_CDEL", "sequences is given (field 10 in the SAM record). In this case, H", "sequence that does not appear in the alignment. This is used with soft", "indels self.dna_snps = [] i = 0 while i < len(self.mutation_str): (query_name, pos,", "base (N) # This is useless data bloat and should be removed as", "do anything else if this read is unmapped if self.read.is_unmapped: return # Get", "mut is a tuple: (Position, Ref, Alt) # Offset the position back to", "of operations] long and add it to the # master operations list self.cigar_ops.extend([op_group[0],]", "Skip elif op == 2 or op == 3: # Add deletion information", "mapped onto the reference # using read.query_alignment_start self.read_i = 0 def crawl_to(self, destination):", "the position on the reference # that was deleted # Store it as", "BAM_CREF_SKIP 3 skipped region from the reference S BAM_CSOFT_CLIP 4 Segment of the", "the read (query) https://pysam.readthedocs.io/en/latest/api.html#pysam.AlignedSegment.cigartuples https://drive5.com/usearch/manual/cigar.html https://samtools.github.io/hts-specs/SAMv1.pdf Op Code Description ----------------------------------------------------------------------------------------- M BAM_CMATCH 0", "BAM_CSOFT_CLIP 4 Segment of the query sequence that does not appear in the", "): j += 1 # Get adjacent indels adj_muts = self.mutation_str[i:j] # Combine", "rather than X's and ='s (see below). 
I BAM_CINS 1 Insertion (gap in", "# Travel to the end of the read # so that we can", "| | H | 5 | No | No | | P |", "mismatch OP code or a base mismatch for a # generic 0 OP", "(Position, Ref, Alt) # Offset the position back to 1-indexed, starting at the", "( (not self.mutation_str[j][2] and not ref) # Both deletions or (not self.mutation_str[j][3] and", "- Vector Engineering Team (<EMAIL>) \"\"\" import numpy as np import pandas as", "bloat and should be removed as # early as possible if alt not", "No | | D | 2 | No | Yes | | N", "string to store mutational information # Since both the CIGAR and MD string", "from the variant_extractor project Author: <NAME> - Vector Engineering Team (<EMAIL>) \"\"\" import", "when writing to disk we'll serialize this array as a semicolon-delimited string self.invalid_errors", "the current CIGAR operation op = self.cigar_ops[self.cigar_i] \"\"\" https://samtools.github.io/hts-specs/SAMv1.pdf --------------------------------------------------- | Op |", "query_name, pos, \"\".join([m[2] for m in adj_muts]), \"\".join([m[3] for m in adj_muts]), )", "be removed as # early as possible if alt not in [\"A\", \"C\",", "def process_all(self): \"\"\"Do everything, return everything\"\"\" # Travel to the end of the", "| | S | 4 | Yes | No | | H |", "i)) ): j += 1 # Get adjacent indels adj_muts = self.mutation_str[i:j] #", "heavily trimmed down version of read_extractor.py (v0.1.0) from the variant_extractor project Author: <NAME>", "( (op == 8) or ( op == 0 and self.read_seq[self.read_i] != self.reference_seq[self.ref_i]", "the cigar index self.cigar_i = 0 # Start the reference at the position", "(gap in the target sequence). N BAM_CREF_SKIP 3 skipped region from the reference", "base (as if we did a multiple sequence alignment on the read and", "Code Description ----------------------------------------------------------------------------------------- M BAM_CMATCH 0 Match (alignment column containing two letters). This", "def load_read(self): \"\"\"Load data in from the pysam.AlignedSegment object into Python \"\"\" #", "i.e. two different letters. USEARCH can read CIGAR strings using this operation, but", "pysam.AlignedSegment object into Python \"\"\" # Nucleotide sequence of the read self.read_seq =", "No | | P | 6 | No | No | | =", "everything\"\"\" # Travel to the end of the read # so that we", "variables self.load_read() def load_read(self): \"\"\"Load data in from the pysam.AlignedSegment object into Python", "Modified and heavily trimmed down version of read_extractor.py (v0.1.0) from the variant_extractor project", "self.mutation_str[j][1] == int(pos - 1 + (j - i)) ): j += 1", "a tuple: (Position, Ref, Alt) # Offset the position back to 1-indexed, starting", "Op Code Description ----------------------------------------------------------------------------------------- M BAM_CMATCH 0 Match (alignment column containing two letters).", "| Yes | | B | 9 | ? | ? | ---------------------------------------------------", "\"\"\"Extract variable regions from an aligned segment, in a flexible and SNP-tolerant manner", "exist) # Don't throw an error once we reach the end self.crawl_to(len(self.reference_seq)) self.get_dna_snps()", "Op | Code | Consume Read | Consume Reference | --------------------------------------------------- | M", "have to check both the read # and the reference to see if", "the alignment. This is used with hard clipping, where only the aligned segment", "the operation code # Second element of the tuple is the number of", "? | ? 
| --------------------------------------------------- \"\"\" # MATCH - can be match or", "string self.mutation_str.append( ( self.read.query_name, self.ref_i, self.reference_seq[self.ref_i], \"\", ) ) self.ref_i += 1 #", "is a tuple: (Position, Ref, Alt) # Offset the position back to 1-indexed,", "1 self.ref_i += 1 # Insertion or Soft Clip elif op == 1", "an aligned segment, in a flexible and SNP-tolerant manner Modified and heavily trimmed", "then joined by ';' later # to serialize into one big string self.mutation_str", "CIGAR string, break out if self.cigar_i >= len(self.cigar_ops) or self.read_i >= len(self.read_seq): return", "operations specify segments at the start and/ or end of the query that", "different letters (mismatch) or two identical letters. USEARCH generates CIGAR strings containing Ms", "the adjacent mutations i = j def process_all(self): \"\"\"Do everything, return everything\"\"\" #", "(if they exist) # Don't throw an error once we reach the end", "will be individually serialized then joined by ';' later # to serialize into", "= [] for op_group in self.read.cigartuples: # First element of the tuple is", "to the previous one and self.mutation_str[j][1] == int(pos - 1 + (j -", "we've reached the end of the CIGAR string, break out if self.cigar_i >=", "op == 0 or op == 7 or op == 8: # Check", "the same \"aligned\" base (as if we did a multiple sequence alignment on", "column containing two identical letters. USEARCH can read CIGAR strings using this operation,", "[# of operations] long and add it to the # master operations list", "flag this variant as successfully extracted, # but not passing filters, will be", "op == 7 or op == 8: # Check for SNPs # If", "a flexible and SNP-tolerant manner Modified and heavily trimmed down version of read_extractor.py", "clipping, where only the aligned segment of the query sequences is given (field", "information # Since both the CIGAR and MD string don't fit our needs", "store mutational information # Since both the CIGAR and MD string don't fit", "='s (see below). I BAM_CINS 1 Insertion (gap in the query sequence). D", "Parameters ---------- read: pysam.AlignedSegment \"\"\" self.read = read # Build our own mutation", "type self.dna_snps.append( ( query_name, pos, \"\".join([m[2] for m in adj_muts]), \"\".join([m[3] for m", "reference at the position it is mapped onto the read # using read.reference_start", "X BAM_CDIFF 8 Alignment column containing a mismatch, i.e. two different letters. 
USEARCH", "# Reset the cigar index self.cigar_i = 0 # Start the reference at", "trimmed down version of read_extractor.py (v0.1.0) from the variant_extractor project Author: <NAME> -", "| 8 | Yes | Yes | | B | 9 | ?", "and heavily trimmed down version of read_extractor.py (v0.1.0) from the variant_extractor project Author:", "on the reference # that was deleted # Store it as a list", "operations specify segments at the start and/or end of the query that do", "No | No | | = | 7 | Yes | Yes |", "should be removed as # early as possible if alt not in [\"A\",", "| Consume Reference | --------------------------------------------------- | M | 0 | Yes | Yes", "is the number of operations # Create a new list [# of operations]", "the end of the CIGAR string, break out if self.cigar_i >= len(self.cigar_ops) or", "self.mutation_str.append( ( self.read.query_name, self.ref_i, self.reference_seq[self.ref_i], self.read_seq[self.read_i], ) ) self.read_i += 1 self.ref_i +=", "position it is mapped onto the read # using read.reference_start self.ref_i = self.read.reference_start", "elif op == 2 or op == 3: # Add deletion information to", "positions and the same indel type j = i while j < len(self.mutation_str)", "that do not appear in a local alignment. H BAM_CHARD_CLIP 5 Segment of", "target sequence). N BAM_CREF_SKIP 3 skipped region from the reference S BAM_CSOFT_CLIP 4", "as a semicolon-delimited string self.invalid_errors = [] # Store SNPs self.dna_snps = []", "letters). This could contain two different letters (mismatch) or two identical letters. USEARCH", "the query sequence). D BAM_CDEL 2 Deletion (gap in the target sequence). N", "continue # Check ahead for adjacent positions and the same indel type j", "through both the read and the reference Use the CIGAR operations and other", "in the query sequence). D BAM_CDEL 2 Deletion (gap in the target sequence).", "local alignment. H BAM_CHARD_CLIP 5 Segment of the query sequence that does not", "!= self.reference_seq[self.ref_i] ) ) and # If the reference has an X as", "(mismatch) or two identical letters. USEARCH generates CIGAR strings containing Ms rather than", "\"\"\" # Nucleotide sequence of the read self.read_seq = self.read.get_forward_sequence() # If reverse", "# Deletion or Skip elif op == 2 or op == 3: #", "B BAM_CBACK 9 \"\"\" self.cigar_ops = [] for op_group in self.read.cigartuples: # First", "of read_extractor.py (v0.1.0) from the variant_extractor project Author: <NAME> - Vector Engineering Team", "one and self.mutation_str[j][1] == int(pos - 1 + (j - i)) ): j", "the CIGAR operations and other stats to stay on the same \"aligned\" base", "| ? | ? | --------------------------------------------------- \"\"\" # MATCH - can be match", "BAM_CINS 1 Insertion (gap in the query sequence). D BAM_CDEL 2 Deletion (gap", "( # Both insertions ( (not self.mutation_str[j][2] and not ref) # Both deletions", "to store mutational information # Since both the CIGAR and MD string don't", "adj_muts]), ) ) # Skip ahead to the end of the adjacent mutations", "alignment. This is used with hard clipping, where only the aligned segment of", "containing two letters). 
This could contain two different letters (mismatch) or two identical", "onto the read # using read.reference_start self.ref_i = self.read.reference_start # Start the read", "CIGAR tuples to a list of CIGAR operations on the read (query) https://pysam.readthedocs.io/en/latest/api.html#pysam.AlignedSegment.cigartuples", "Insertion or Soft Clip elif op == 1 or op == 4: #", "self.mutation_str[i] # mut is a tuple: (Position, Ref, Alt) # Offset the position", "Ms rather than X's and ='s (see below). I BAM_CINS 1 Insertion (gap", "this case, H operations specify segments at the start and/or end of the", "errors that flag this variant as successfully extracted, # but not passing filters,", "alt not in [\"A\", \"C\", \"G\", \"T\"]: continue self.dna_snps.append((query_name, pos, ref, alt)) continue", "a mismatch, i.e. two different letters. USEARCH can read CIGAR strings using this", "Start the reference at the position it is mapped onto the read #", "writing to disk we'll serialize this array as a semicolon-delimited string self.invalid_errors =", "Yes | No | | H | 5 | No | No |", "(<EMAIL>) \"\"\" import numpy as np import pandas as pd from collections import", "I | 1 | Yes | No | | D | 2 |", "Later when writing to disk we'll serialize this array as a semicolon-delimited string", "== 0 or op == 7 or op == 8: # Check for", "and type self.dna_snps.append( ( query_name, pos, \"\".join([m[2] for m in adj_muts]), \"\".join([m[3] for", "self.read.query_name, self.ref_i, self.reference_seq[self.ref_i], \"\", ) ) self.ref_i += 1 # Hard Clip, Padding", "genome start pos = pos + 1 # If it's a SNP, then", "nothing pass # Always iterate the CIGAR index self.cigar_i += 1 # END", "element of the tuple is the number of operations # Create a new", "our needs # Format: Position:Ref:Alt;... # Where position is relative to the reference", "in the SAM record) . In this case, S operations specify segments at", "= 0 while i < len(self.mutation_str): (query_name, pos, ref, alt) = self.mutation_str[i] #", "unmapped if self.read.is_unmapped: return # Get the reference sequence self.reference_seq = ReadExtractor.RefSeq \"\"\"Expand", "get_dna_snps(self): \"\"\"Store list of NT SNPs/indels\"\"\" # Join adjacent indels self.dna_snps = []", "M BAM_CMATCH 0 Match (alignment column containing two letters). This could contain two", "( self.read.query_name, self.ref_i, self.reference_seq[self.ref_i], \"\", ) ) self.ref_i += 1 # Hard Clip,", "Deletion (gap in the target sequence). 
N BAM_CREF_SKIP 3 skipped region from the", "self.read_i += 1 # Deletion or Skip elif op == 2 or op", "# For insertions, the position is the position on the reference # after", "the CIGAR index self.cigar_i += 1 # END WHILE def get_dna_snps(self): \"\"\"Store list", "i < len(self.mutation_str): (query_name, pos, ref, alt) = self.mutation_str[i] # mut is a", "a multiple sequence alignment on the read and ref) Parameters ---------- destination: int", "big string self.mutation_str = [] # Any invalidation errors that flag this variant", "new list [# of operations] long and add it to the # master", "# Insertion or Soft Clip elif op == 1 or op == 4:", "(v0.1.0) from the variant_extractor project Author: <NAME> - Vector Engineering Team (<EMAIL>) \"\"\"", "# Offset the position back to 1-indexed, starting at the genome start pos", "segments at the start and/ or end of the query that do not", "| 7 | Yes | Yes | | X | 8 | Yes", "OP code ( (op == 8) or ( op == 0 and self.read_seq[self.read_i]", ") self.ref_i += 1 # Hard Clip, Padding else: # Do nothing pass", "end of the query that do not appear in a local alignment. H", "(N) # This is useless data bloat and should be removed as #", "case, S operations specify segments at the start and/ or end of the", "position it is mapped onto the reference # using read.query_alignment_start self.read_i = 0", "insertion # For deletions, the position is the position on the reference #", "\"\"\" https://samtools.github.io/hts-specs/SAMv1.pdf --------------------------------------------------- | Op | Code | Consume Read | Consume Reference", "position is the position on the reference # that was deleted # Store", "8: # Check for SNPs # If the OP code is 0, then", "\"\"\"Expand CIGAR tuples to a list of CIGAR operations on the read (query)", "that we can collect additional mutations (if they exist) # Don't throw an", "skipped region from the reference S BAM_CSOFT_CLIP 4 Segment of the query sequence", "== 8) or ( op == 0 and self.read_seq[self.read_i] != self.reference_seq[self.ref_i] ) )", "self.ref_i += 1 # Insertion or Soft Clip elif op == 1 or", "| B | 9 | ? | ? | --------------------------------------------------- \"\"\" # MATCH", "If we've reached the end of the CIGAR string, break out if self.cigar_i", "# Build our own mutation string to store mutational information # Since both", "can be match or mismatch (SNP) if op == 0 or op ==", "two letters). This could contain two different letters (mismatch) or two identical letters.", "segment, in a flexible and SNP-tolerant manner Modified and heavily trimmed down version", "SNPs # If the OP code is 0, then we have to check", "# Check for a mismatch OP code or a base mismatch for a", "start and/ or end of the query that do not appear in a", "a mismatch if ( # Check for a mismatch OP code or a", "P BAM_CPAD 6 padding (silent deletion from padded reference) = BAM_CEQUAL 7 Alignment", "WHILE def get_dna_snps(self): \"\"\"Store list of NT SNPs/indels\"\"\" # Join adjacent indels self.dna_snps", "read # Build our own mutation string to store mutational information # Since", "two different letters (mismatch) or two identical letters. USEARCH generates CIGAR strings containing", "does not generate them. 
B BAM_CBACK 9 \"\"\" self.cigar_ops = [] for op_group", "Nucleotide sequence of the read self.read_seq = self.read.get_forward_sequence() # If reverse complement, flip", "In this case, S operations specify segments at the start and/ or end", "tuple is the operation code # Second element of the tuple is the", "it as a list of tuples, (Position, Ref, Alt) for now. # Mutations", "the tuple is the number of operations # Create a new list [#", "one big string self.mutation_str = [] # Any invalidation errors that flag this", "collect additional mutations (if they exist) # Don't throw an error once we", "if there's a mismatch # If bowtie2 gave us the OP code of", "coding: utf-8 \"\"\"Extract variable regions from an aligned segment, in a flexible and", "gave us the OP code of 8, then we know there's a mismatch", "self.read.query_name, self.ref_i, self.reference_seq[self.ref_i], self.read_seq[self.read_i], ) ) self.read_i += 1 self.ref_i += 1 #", "operations list self.cigar_ops.extend([op_group[0],] * op_group[1]) # Reset the cigar index self.cigar_i = 0", "reference of where we want to crawl to \"\"\" while self.ref_i < destination:", "in this array # Later when writing to disk we'll serialize this array", "first position and type self.dna_snps.append( ( query_name, pos, \"\".join([m[2] for m in adj_muts]),", "i += 1 # Actually, skip adding it if either the ref or", "to the end of the adjacent mutations i = j def process_all(self): \"\"\"Do", "bases) through both the read and the reference Use the CIGAR operations and", "code # Second element of the tuple is the number of operations #", "op == 3: # Add deletion information to mutation string self.mutation_str.append( ( self.read.query_name,", "Alt) # Offset the position back to 1-indexed, starting at the genome start", "string self.mutation_str = [] # Any invalidation errors that flag this variant as", "\"\"\" self.read = read # Build our own mutation string to store mutational", "read # and the reference to see if there's a mismatch # If", "mismatch # If bowtie2 gave us the OP code of 8, then we", "not appear in the alignment. This is used with soft clipping, where the", "def __init__(self, read): \"\"\"Build the extactor object for a read (pysam.AlignedSegment) or a", "= BAM_CEQUAL 7 Alignment column containing two identical letters. USEARCH can read CIGAR", "D BAM_CDEL 2 Deletion (gap in the target sequence). 
N BAM_CREF_SKIP 3 skipped", "# Add deletion information to mutation string self.mutation_str.append( ( self.read.query_name, self.ref_i, self.reference_seq[self.ref_i], \"\",", "the read # so that we can collect additional mutations (if they exist)", "serialize this array as a semicolon-delimited string self.invalid_errors = [] # Store SNPs", "== 3: # Add deletion information to mutation string self.mutation_str.append( ( self.read.query_name, self.ref_i,", "after the insertion # For deletions, the position is the position on the", "6 padding (silent deletion from padded reference) = BAM_CEQUAL 7 Alignment column containing", "Hard Clip, Padding else: # Do nothing pass # Always iterate the CIGAR", "# Grab the current CIGAR operation op = self.cigar_ops[self.cigar_i] \"\"\" https://samtools.github.io/hts-specs/SAMv1.pdf --------------------------------------------------- |", "of the CIGAR string, break out if self.cigar_i >= len(self.cigar_ops) or self.read_i >=", "current CIGAR operation op = self.cigar_ops[self.cigar_i] \"\"\" https://samtools.github.io/hts-specs/SAMv1.pdf --------------------------------------------------- | Op | Code", "Build our own mutation string to store mutational information # Since both the", "- 1 + (j - i)) ): j += 1 # Get adjacent", "and self.read_seq[self.read_i] != self.reference_seq[self.ref_i] ) ) and # If the reference has an", "used with hard clipping, where only the aligned segment of the query sequences", "the read # using read.reference_start self.ref_i = self.read.reference_start # Start the read at", "start and/or end of the query that do not appear in the SAM", "# coding: utf-8 \"\"\"Extract variable regions from an aligned segment, in a flexible", "starting at the genome start pos = pos + 1 # If it's", "def get_dna_snps(self): \"\"\"Store list of NT SNPs/indels\"\"\" # Join adjacent indels self.dna_snps =", "= ReadExtractor.RefSeq \"\"\"Expand CIGAR tuples to a list of CIGAR operations on the", "Parameters ---------- destination: int - Index on the reference of where we want", "self.read_seq[self.read_i]) ) self.read_i += 1 # Deletion or Skip elif op == 2", "string self.mutation_str.append( (self.read.query_name, self.ref_i, \"\", self.read_seq[self.read_i]) ) self.read_i += 1 # Deletion or", "self.read_seq[self.read_i] != self.reference_seq[self.ref_i] ) ) and # If the reference has an X", "but keep first position and type self.dna_snps.append( ( query_name, pos, \"\".join([m[2] for m", "or a base mismatch for a # generic 0 OP code ( (op", "the reference at the position it is mapped onto the read # using", "\"G\", \"T\"]: continue self.dna_snps.append((query_name, pos, ref, alt)) continue # Check ahead for adjacent", "query sequence). D BAM_CDEL 2 Deletion (gap in the target sequence). N BAM_CREF_SKIP", "USEARCH generates CIGAR strings containing Ms rather than X's and ='s (see below).", "read # so that we can collect additional mutations (if they exist) #", "indels adj_muts = self.mutation_str[i:j] # Combine bases, but keep first position and type", "string don't fit our needs # Format: Position:Ref:Alt;... # Where position is relative", "not appear in the alignment. 
This is used with hard clipping, where only", "operations on the read (query) https://pysam.readthedocs.io/en/latest/api.html#pysam.AlignedSegment.cigartuples https://drive5.com/usearch/manual/cigar.html https://samtools.github.io/hts-specs/SAMv1.pdf Op Code Description ----------------------------------------------------------------------------------------- M", "on the reference # after the insertion # For deletions, the position is", "indel type j = i while j < len(self.mutation_str) and ( # Both", "check both the read # and the reference to see if there's a", "(0-indexed) # For insertions, the position is the position on the reference #", "Read | Consume Reference | --------------------------------------------------- | M | 0 | Yes |", "\"\"\" self.cigar_ops = [] for op_group in self.read.cigartuples: # First element of the", "len(self.read_seq): return # Grab the current CIGAR operation op = self.cigar_ops[self.cigar_i] \"\"\" https://samtools.github.io/hts-specs/SAMv1.pdf", "strings using this operation, but does not generate them. B BAM_CBACK 9 \"\"\"", "alt) ) # New position must be adjacent to the previous one and", "is used with soft clipping, where the full-length query sequence is given (field", "< destination: # If we've reached the end of the CIGAR string, break", "keep first position and type self.dna_snps.append( ( query_name, pos, \"\".join([m[2] for m in", "mutations (if they exist) # Don't throw an error once we reach the", "scores if self.read.is_reverse: self.read_seq = reverse_complement(self.read_seq) # Don't try to do anything else", "that does not appear in the alignment. This is used with soft clipping,", "appear in the alignment. This is used with soft clipping, where the full-length", "# If bowtie2 gave us the OP code of 8, then we know", "SNP, then add and continue if ref and alt: i += 1 #", "ahead to the end of the adjacent mutations i = j def process_all(self):", "index self.cigar_i = 0 # Start the reference at the position it is", "1-indexed, starting at the genome start pos = pos + 1 # If", "END WHILE def get_dna_snps(self): \"\"\"Store list of NT SNPs/indels\"\"\" # Join adjacent indels", "is useless data bloat and should be removed as # early as possible", "# is an ambiguous base (N) # This is useless data bloat and", "ref and alt: i += 1 # Actually, skip adding it if either", "self.ref_i, \"\", self.read_seq[self.read_i]) ) self.read_i += 1 # Deletion or Skip elif op", "alignment on the read and ref) Parameters ---------- destination: int - Index on", "reached the end of the CIGAR string, break out if self.cigar_i >= len(self.cigar_ops)", "self.cigar_ops.extend([op_group[0],] * op_group[1]) # Reset the cigar index self.cigar_i = 0 # Start", "must be adjacent to the previous one and self.mutation_str[j][1] == int(pos - 1", "# ignore any SNPs at this position (self.reference_seq[self.ref_i] != \"X\") ): # Add", "reference sequence self.reference_seq = ReadExtractor.RefSeq \"\"\"Expand CIGAR tuples to a list of CIGAR", "the query that do not appear in a local alignment. 
H BAM_CHARD_CLIP 5", "of the tuple is the number of operations # Create a new list", "Add insertion information to mutation string self.mutation_str.append( (self.read.query_name, self.ref_i, \"\", self.read_seq[self.read_i]) ) self.read_i", "mismatch for a # generic 0 OP code ( (op == 8) or", "the genome start pos = pos + 1 # If it's a SNP,", "the reference Use the CIGAR operations and other stats to stay on the", "= [] i = 0 while i < len(self.mutation_str): (query_name, pos, ref, alt)", "and ref) Parameters ---------- destination: int - Index on the reference of where", "and should be removed as # early as possible if alt not in", "Yes | Yes | | X | 8 | Yes | Yes |", "the position is the position on the reference # that was deleted #", "If the OP code is 0, then we have to check both the", "| Code | Consume Read | Consume Reference | --------------------------------------------------- | M |", "using paired-end sequencing Parameters ---------- read: pysam.AlignedSegment \"\"\" self.read = read # Build", "self.dna_snps.append((query_name, pos, ref, alt)) continue # Check ahead for adjacent positions and the", "No | Yes | | S | 4 | Yes | No |", "same indel type j = i while j < len(self.mutation_str) and ( #", "+= 1 self.ref_i += 1 # Insertion or Soft Clip elif op ==", "read_extractor.py (v0.1.0) from the variant_extractor project Author: <NAME> - Vector Engineering Team (<EMAIL>)", "| H | 5 | No | No | | P | 6", "Match (alignment column containing two letters). This could contain two different letters (mismatch)", "that does not appear in the alignment. This is used with hard clipping,", "the read # and the reference to see if there's a mismatch #", "the query sequences is given (field 10 in the SAM record). In this", "the reference to see if there's a mismatch # If bowtie2 gave us", "to stay on the same \"aligned\" base (as if we did a multiple", "into Python \"\"\" # Nucleotide sequence of the read self.read_seq = self.read.get_forward_sequence() #", "us the OP code of 8, then we know there's a mismatch if", "the start and/or end of the query that do not appear in the", "alt: i += 1 # Actually, skip adding it if either the ref", "Add deletion information to mutation string self.mutation_str.append( ( self.read.query_name, self.ref_i, self.reference_seq[self.ref_i], \"\", )", "this variant as successfully extracted, # but not passing filters, will be stored", "import defaultdict from scripts.util import translate, reverse_complement class ReadExtractor: \"\"\"Extract variable regions from", "4 Segment of the query sequence that does not appear in the alignment.", "this operation, but does not generate them. B BAM_CBACK 9 \"\"\" self.cigar_ops =", "0 | Yes | Yes | | I | 1 | Yes |", "D | 2 | No | Yes | | N | 3 |", "+ 1 # If it's a SNP, then add and continue if ref", "filters, will be stored in this array # Later when writing to disk", "SAM record). In this case, H operations specify segments at the start and/or", "self.mutation_str.append( (self.read.query_name, self.ref_i, \"\", self.read_seq[self.read_i]) ) self.read_i += 1 # Deletion or Skip", "appear in the SAM record. 
P BAM_CPAD 6 padding (silent deletion from padded", "to do anything else if this read is unmapped if self.read.is_unmapped: return #", "self.read.cigartuples: # First element of the tuple is the operation code # Second", "# to serialize into one big string self.mutation_str = [] # Any invalidation", "project Author: <NAME> - Vector Engineering Team (<EMAIL>) \"\"\" import numpy as np", "and/or end of the query that do not appear in the SAM record.", "adding it if either the ref or the alt # is an ambiguous", "Create a new list [# of operations] long and add it to the", "j = i while j < len(self.mutation_str) and ( # Both insertions (", "now. # Mutations will be individually serialized then joined by ';' later #", "SNPs at this position (self.reference_seq[self.ref_i] != \"X\") ): # Add substitution information to", "< len(self.mutation_str): (query_name, pos, ref, alt) = self.mutation_str[i] # mut is a tuple:", "self.read_seq = reverse_complement(self.read_seq) # Don't try to do anything else if this read", "1 Insertion (gap in the query sequence). D BAM_CDEL 2 Deletion (gap in", "BAM_CMATCH 0 Match (alignment column containing two letters). This could contain two different", "# Combine bases, but keep first position and type self.dna_snps.append( ( query_name, pos,", ") # New position must be adjacent to the previous one and self.mutation_str[j][1]", "| Yes | | S | 4 | Yes | No | |", "0, then we have to check both the read # and the reference", "# Start the read at the position it is mapped onto the reference", "---------- read: pysam.AlignedSegment \"\"\" self.read = read # Build our own mutation string", "in the alignment. This is used with soft clipping, where the full-length query", "that was deleted # Store it as a list of tuples, (Position, Ref,", "No | Yes | | N | 3 | No | Yes |", "disk we'll serialize this array as a semicolon-delimited string self.invalid_errors = [] #", "don't fit our needs # Format: Position:Ref:Alt;... # Where position is relative to", "| Yes | Yes | | I | 1 | Yes | No", "# If it's a SNP, then add and continue if ref and alt:", "them. B BAM_CBACK 9 \"\"\" self.cigar_ops = [] for op_group in self.read.cigartuples: #", "defaultdict from scripts.util import translate, reverse_complement class ReadExtractor: \"\"\"Extract variable regions from a", "not appear in a local alignment. 
H BAM_CHARD_CLIP 5 Segment of the query", "serialize into one big string self.mutation_str = [] # Any invalidation errors that", "If the reference has an X as the base, then # ignore any", "string, break out if self.cigar_i >= len(self.cigar_ops) or self.read_i >= len(self.read_seq): return #", "passing filters, will be stored in this array # Later when writing to", "to mutation string self.mutation_str.append( ( self.read.query_name, self.ref_i, self.reference_seq[self.ref_i], \"\", ) ) self.ref_i +=", "in a flexible and SNP-tolerant manner Modified and heavily trimmed down version of", "(op == 8) or ( op == 0 and self.read_seq[self.read_i] != self.reference_seq[self.ref_i] )", "Code | Consume Read | Consume Reference | --------------------------------------------------- | M | 0", "list of NT SNPs/indels\"\"\" # Join adjacent indels self.dna_snps = [] i =", "# but not passing filters, will be stored in this array # Later", "self.read = read # Build our own mutation string to store mutational information", "for a # generic 0 OP code ( (op == 8) or (", "| | I | 1 | Yes | No | | D |", "the CIGAR string, break out if self.cigar_i >= len(self.cigar_ops) or self.read_i >= len(self.read_seq):", "= | 7 | Yes | Yes | | X | 8 |", "operation, but does not generate them. B BAM_CBACK 9 \"\"\" self.cigar_ops = []", "or the alt # is an ambiguous base (N) # This is useless", "this operation, but does not generate them. X BAM_CDIFF 8 Alignment column containing", "self.cigar_ops[self.cigar_i] \"\"\" https://samtools.github.io/hts-specs/SAMv1.pdf --------------------------------------------------- | Op | Code | Consume Read | Consume", "is used with hard clipping, where only the aligned segment of the query", "ambiguous base (N) # This is useless data bloat and should be removed", "sequence). N BAM_CREF_SKIP 3 skipped region from the reference S BAM_CSOFT_CLIP 4 Segment", "the position it is mapped onto the read # using read.reference_start self.ref_i =", "that do not appear in the SAM record. P BAM_CPAD 6 padding (silent", "into python variables self.load_read() def load_read(self): \"\"\"Load data in from the pysam.AlignedSegment object", "paired-end sequencing Parameters ---------- read: pysam.AlignedSegment \"\"\" self.read = read # Build our", "\"\"\" import numpy as np import pandas as pd from collections import defaultdict", "operations # Create a new list [# of operations] long and add it", "int - Index on the reference of where we want to crawl to", "string self.mutation_str.append( ( self.read.query_name, self.ref_i, self.reference_seq[self.ref_i], self.read_seq[self.read_i], ) ) self.read_i += 1 self.ref_i", "the variant_extractor project Author: <NAME> - Vector Engineering Team (<EMAIL>) \"\"\" import numpy", "# Store SNPs self.dna_snps = [] # Read data from the pysam.AlignedSegment object", "CIGAR strings using this operation, but does not generate them. 
X BAM_CDIFF 8", "self.read_i = 0 def crawl_to(self, destination): \"\"\"Iterate (consume bases) through both the read", "Reset the cigar index self.cigar_i = 0 # Start the reference at the", "version of read_extractor.py (v0.1.0) from the variant_extractor project Author: <NAME> - Vector Engineering", "list self.cigar_ops.extend([op_group[0],] * op_group[1]) # Reset the cigar index self.cigar_i = 0 #", "was deleted # Store it as a list of tuples, (Position, Ref, Alt)", "is the operation code # Second element of the tuple is the number", "(j - i)) ): j += 1 # Get adjacent indels adj_muts =", "Python \"\"\" # Nucleotide sequence of the read self.read_seq = self.read.get_forward_sequence() # If", "self.cigar_i >= len(self.cigar_ops) or self.read_i >= len(self.read_seq): return # Grab the current CIGAR", "segment of the query sequences is given (field 10 in the SAM record).", ") ) # Skip ahead to the end of the adjacent mutations i", "1 + (j - i)) ): j += 1 # Get adjacent indels", ") # Skip ahead to the end of the adjacent mutations i =", "the previous one and self.mutation_str[j][1] == int(pos - 1 + (j - i))", "( self.read.query_name, self.ref_i, self.reference_seq[self.ref_i], self.read_seq[self.read_i], ) ) self.read_i += 1 self.ref_i += 1", "( op == 0 and self.read_seq[self.read_i] != self.reference_seq[self.ref_i] ) ) and # If", "mutation string self.mutation_str.append( ( self.read.query_name, self.ref_i, self.reference_seq[self.ref_i], self.read_seq[self.read_i], ) ) self.read_i += 1", "\"\"\" RefSeq = \"\" def __init__(self, read): \"\"\"Build the extactor object for a", "early as possible if alt not in [\"A\", \"C\", \"G\", \"T\"]: continue self.dna_snps.append((query_name,", "reference to see if there's a mismatch # If bowtie2 gave us the", "i while j < len(self.mutation_str) and ( # Both insertions ( (not self.mutation_str[j][2]", "+ (j - i)) ): j += 1 # Get adjacent indels adj_muts", "base, then # ignore any SNPs at this position (self.reference_seq[self.ref_i] != \"X\") ):", "This could contain two different letters (mismatch) or two identical letters. USEARCH generates", "| No | | P | 6 | No | No | |", "adjacent to the previous one and self.mutation_str[j][1] == int(pos - 1 + (j", "(not self.mutation_str[j][2] and not ref) # Both deletions or (not self.mutation_str[j][3] and not", "self.ref_i < destination: # If we've reached the end of the CIGAR string,", "we have to check both the read # and the reference to see", "Reference | --------------------------------------------------- | M | 0 | Yes | Yes | |", "match or mismatch (SNP) if op == 0 or op == 7 or", "specify segments at the start and/or end of the query that do not", "SAM record. P BAM_CPAD 6 padding (silent deletion from padded reference) = BAM_CEQUAL", "https://drive5.com/usearch/manual/cigar.html https://samtools.github.io/hts-specs/SAMv1.pdf Op Code Description ----------------------------------------------------------------------------------------- M BAM_CMATCH 0 Match (alignment column containing", "stored in this array # Later when writing to disk we'll serialize this", "| Op | Code | Consume Read | Consume Reference | --------------------------------------------------- |", "Padding else: # Do nothing pass # Always iterate the CIGAR index self.cigar_i", "# using read.query_alignment_start self.read_i = 0 def crawl_to(self, destination): \"\"\"Iterate (consume bases) through", "self.invalid_errors = [] # Store SNPs self.dna_snps = [] # Read data from", "Format: Position:Ref:Alt;... 
# Where position is relative to the reference (0-indexed) # For", "strings containing Ms rather than X's and ='s (see below). I BAM_CINS 1", "op_group[1]) # Reset the cigar index self.cigar_i = 0 # Start the reference", "\"\"\" # MATCH - can be match or mismatch (SNP) if op ==", "to disk we'll serialize this array as a semicolon-delimited string self.invalid_errors = []", "Clip elif op == 1 or op == 4: # Add insertion information", "translate, reverse_complement class ReadExtractor: \"\"\"Extract variable regions from a pysam AlignedSegment \"\"\" RefSeq", "self.mutation_str.append( ( self.read.query_name, self.ref_i, self.reference_seq[self.ref_i], \"\", ) ) self.ref_i += 1 # Hard", "| 5 | No | No | | P | 6 | No", "| | D | 2 | No | Yes | | N |", "OP code is 0, then we have to check both the read #", "tuples, (Position, Ref, Alt) for now. # Mutations will be individually serialized then", "the query that do not appear in the SAM record. P BAM_CPAD 6", "given (field 10 in the SAM record). In this case, H operations specify", ">= len(self.cigar_ops) or self.read_i >= len(self.read_seq): return # Grab the current CIGAR operation", "self.read.is_reverse: self.read_seq = reverse_complement(self.read_seq) # Don't try to do anything else if this", "tuple: (Position, Ref, Alt) # Offset the position back to 1-indexed, starting at", "the end of the read # so that we can collect additional mutations", "on the read and ref) Parameters ---------- destination: int - Index on the", "self.mutation_str[j][3] and not alt) ) # New position must be adjacent to the", "in the SAM record). In this case, H operations specify segments at the", "--------------------------------------------------- \"\"\" # MATCH - can be match or mismatch (SNP) if op", "1 or op == 4: # Add insertion information to mutation string self.mutation_str.append(", "(silent deletion from padded reference) = BAM_CEQUAL 7 Alignment column containing two identical", "and # If the reference has an X as the base, then #", "8) or ( op == 0 and self.read_seq[self.read_i] != self.reference_seq[self.ref_i] ) ) and", "a mismatch OP code or a base mismatch for a # generic 0", "of tuples, (Position, Ref, Alt) for now. # Mutations will be individually serialized", "2 Deletion (gap in the target sequence). 
N BAM_CREF_SKIP 3 skipped region from", "alt # is an ambiguous base (N) # This is useless data bloat", "\"T\"]: continue self.dna_snps.append((query_name, pos, ref, alt)) continue # Check ahead for adjacent positions", "for op_group in self.read.cigartuples: # First element of the tuple is the operation", "# Actually, skip adding it if either the ref or the alt #", "own mutation string to store mutational information # Since both the CIGAR and", "the start and/ or end of the query that do not appear in", "NT SNPs/indels\"\"\" # Join adjacent indels self.dna_snps = [] i = 0 while", "Grab the current CIGAR operation op = self.cigar_ops[self.cigar_i] \"\"\" https://samtools.github.io/hts-specs/SAMv1.pdf --------------------------------------------------- | Op", "1 | Yes | No | | D | 2 | No |", "variable regions from a pysam AlignedSegment \"\"\" RefSeq = \"\" def __init__(self, read):", "\"C\", \"G\", \"T\"]: continue self.dna_snps.append((query_name, pos, ref, alt)) continue # Check ahead for", "or op == 3: # Add deletion information to mutation string self.mutation_str.append( (", "j += 1 # Get adjacent indels adj_muts = self.mutation_str[i:j] # Combine bases,", "= [] # Store SNPs self.dna_snps = [] # Read data from the", "3: # Add deletion information to mutation string self.mutation_str.append( ( self.read.query_name, self.ref_i, self.reference_seq[self.ref_i],", "# using read.reference_start self.ref_i = self.read.reference_start # Start the read at the position", "| X | 8 | Yes | Yes | | B | 9", "alignment. H BAM_CHARD_CLIP 5 Segment of the query sequence that does not appear", "This is used with soft clipping, where the full-length query sequence is given", "variant_extractor project Author: <NAME> - Vector Engineering Team (<EMAIL>) \"\"\" import numpy as", "# Don't try to do anything else if this read is unmapped if", "with soft clipping, where the full-length query sequence is given (field 10 in", "I BAM_CINS 1 Insertion (gap in the query sequence). D BAM_CDEL 2 Deletion", "operation code # Second element of the tuple is the number of operations", "read.reference_start self.ref_i = self.read.reference_start # Start the read at the position it is", "self.ref_i, self.reference_seq[self.ref_i], self.read_seq[self.read_i], ) ) self.read_i += 1 self.ref_i += 1 # Insertion", "adjacent indels self.dna_snps = [] i = 0 while i < len(self.mutation_str): (query_name,", "operations and other stats to stay on the same \"aligned\" base (as if", "this position (self.reference_seq[self.ref_i] != \"X\") ): # Add substitution information to mutation string", "No | | = | 7 | Yes | Yes | | X", "Alignment column containing two identical letters. USEARCH can read CIGAR strings using this", "(query_name, pos, ref, alt) = self.mutation_str[i] # mut is a tuple: (Position, Ref,", "the OP code is 0, then we have to check both the read", "reverse_complement class ReadExtractor: \"\"\"Extract variable regions from a pysam AlignedSegment \"\"\" RefSeq =", "this read is unmapped if self.read.is_unmapped: return # Get the reference sequence self.reference_seq", "Engineering Team (<EMAIL>) \"\"\" import numpy as np import pandas as pd from", "# Add substitution information to mutation string self.mutation_str.append( ( self.read.query_name, self.ref_i, self.reference_seq[self.ref_i], self.read_seq[self.read_i],", ". 
In this case, S operations specify segments at the start and/ or", "P | 6 | No | No | | = | 7 |", "return # Get the reference sequence self.reference_seq = ReadExtractor.RefSeq \"\"\"Expand CIGAR tuples to", "| ? | --------------------------------------------------- \"\"\" # MATCH - can be match or mismatch", "a list of tuples, (Position, Ref, Alt) for now. # Mutations will be", "the pysam.AlignedSegment object into python variables self.load_read() def load_read(self): \"\"\"Load data in from", "alt) = self.mutation_str[i] # mut is a tuple: (Position, Ref, Alt) # Offset", "self.mutation_str = [] # Any invalidation errors that flag this variant as successfully", "the read at the position it is mapped onto the reference # using", "a pair of reads if using paired-end sequencing Parameters ---------- read: pysam.AlignedSegment \"\"\"", "of the adjacent mutations i = j def process_all(self): \"\"\"Do everything, return everything\"\"\"", "# Later when writing to disk we'll serialize this array as a semicolon-delimited", "if alt not in [\"A\", \"C\", \"G\", \"T\"]: continue self.dna_snps.append((query_name, pos, ref, alt))", "position back to 1-indexed, starting at the genome start pos = pos +", "| Yes | Yes | | B | 9 | ? | ?", "8, then we know there's a mismatch if ( # Check for a", "aligned segment, in a flexible and SNP-tolerant manner Modified and heavily trimmed down", "self.dna_snps.append( ( query_name, pos, \"\".join([m[2] for m in adj_muts]), \"\".join([m[3] for m in", "then we know there's a mismatch if ( # Check for a mismatch", "read CIGAR strings using this operation, but does not generate them. B BAM_CBACK", "add and continue if ref and alt: i += 1 # Actually, skip", "pysam.AlignedSegment object into python variables self.load_read() def load_read(self): \"\"\"Load data in from the", "\"\"\" while self.ref_i < destination: # If we've reached the end of the", "the reference has an X as the base, then # ignore any SNPs", "B | 9 | ? | ? | --------------------------------------------------- \"\"\" # MATCH -", "object into python variables self.load_read() def load_read(self): \"\"\"Load data in from the pysam.AlignedSegment", "either the ref or the alt # is an ambiguous base (N) #", "and the reference to see if there's a mismatch # If bowtie2 gave", "from the pysam.AlignedSegment object into python variables self.load_read() def load_read(self): \"\"\"Load data in", "can collect additional mutations (if they exist) # Don't throw an error once", "For deletions, the position is the position on the reference # that was", "or Skip elif op == 2 or op == 3: # Add deletion", "utf-8 \"\"\"Extract variable regions from an aligned segment, in a flexible and SNP-tolerant", "(consume bases) through both the read and the reference Use the CIGAR operations", "# MATCH - can be match or mismatch (SNP) if op == 0", "reverse_complement(self.read_seq) # Don't try to do anything else if this read is unmapped", "as successfully extracted, # but not passing filters, will be stored in this", "and the quality scores if self.read.is_reverse: self.read_seq = reverse_complement(self.read_seq) # Don't try to", "| --------------------------------------------------- | M | 0 | Yes | Yes | | I", "If it's a SNP, then add and continue if ref and alt: i", "(alignment column containing two letters). This could contain two different letters (mismatch) or", "generates CIGAR strings containing Ms rather than X's and ='s (see below). I", "record. 
P BAM_CPAD 6 padding (silent deletion from padded reference) = BAM_CEQUAL 7", "0 and self.read_seq[self.read_i] != self.reference_seq[self.ref_i] ) ) and # If the reference has", "Yes | | I | 1 | Yes | No | | D", "BAM_CBACK 9 \"\"\" self.cigar_ops = [] for op_group in self.read.cigartuples: # First element", "# early as possible if alt not in [\"A\", \"C\", \"G\", \"T\"]: continue", "insertions ( (not self.mutation_str[j][2] and not ref) # Both deletions or (not self.mutation_str[j][3]", "anything else if this read is unmapped if self.read.is_unmapped: return # Get the", "iterate the CIGAR index self.cigar_i += 1 # END WHILE def get_dna_snps(self): \"\"\"Store", "that flag this variant as successfully extracted, # but not passing filters, will", "we'll serialize this array as a semicolon-delimited string self.invalid_errors = [] # Store", "import translate, reverse_complement class ReadExtractor: \"\"\"Extract variable regions from a pysam AlignedSegment \"\"\"", "for m in adj_muts]), \"\".join([m[3] for m in adj_muts]), ) ) # Skip", "self.dna_snps = [] # Read data from the pysam.AlignedSegment object into python variables", "AlignedSegment \"\"\" RefSeq = \"\" def __init__(self, read): \"\"\"Build the extactor object for", "BAM_CHARD_CLIP 5 Segment of the query sequence that does not appear in the", "an X as the base, then # ignore any SNPs at this position", "from padded reference) = BAM_CEQUAL 7 Alignment column containing two identical letters. USEARCH", "Since both the CIGAR and MD string don't fit our needs # Format:", "BAM_CPAD 6 padding (silent deletion from padded reference) = BAM_CEQUAL 7 Alignment column", "# Add insertion information to mutation string self.mutation_str.append( (self.read.query_name, self.ref_i, \"\", self.read_seq[self.read_i]) )", "0 while i < len(self.mutation_str): (query_name, pos, ref, alt) = self.mutation_str[i] # mut", "Index on the reference of where we want to crawl to \"\"\" while", "if self.cigar_i >= len(self.cigar_ops) or self.read_i >= len(self.read_seq): return # Grab the current", "# Where position is relative to the reference (0-indexed) # For insertions, the", "(self.read.query_name, self.ref_i, \"\", self.read_seq[self.read_i]) ) self.read_i += 1 # Deletion or Skip elif", "< len(self.mutation_str) and ( # Both insertions ( (not self.mutation_str[j][2] and not ref)", "only the aligned segment of the query sequences is given (field 10 in", "flip the sequence and the quality scores if self.read.is_reverse: self.read_seq = reverse_complement(self.read_seq) #", "# Get the reference sequence self.reference_seq = ReadExtractor.RefSeq \"\"\"Expand CIGAR tuples to a", "the OP code of 8, then we know there's a mismatch if (", "import pandas as pd from collections import defaultdict from scripts.util import translate, reverse_complement", "# Nucleotide sequence of the read self.read_seq = self.read.get_forward_sequence() # If reverse complement,", "deleted # Store it as a list of tuples, (Position, Ref, Alt) for", "reference Use the CIGAR operations and other stats to stay on the same", "| 6 | No | No | | = | 7 | Yes", "Description ----------------------------------------------------------------------------------------- M BAM_CMATCH 0 Match (alignment column containing two letters). This could", "# and the reference to see if there's a mismatch # If bowtie2", "is given (field 10 in the SAM record) . 
In this case, S", "(self.reference_seq[self.ref_i] != \"X\") ): # Add substitution information to mutation string self.mutation_str.append( (", "Segment of the query sequence that does not appear in the alignment. This", "in adj_muts]), ) ) # Skip ahead to the end of the adjacent", "code ( (op == 8) or ( op == 0 and self.read_seq[self.read_i] !=", "or self.read_i >= len(self.read_seq): return # Grab the current CIGAR operation op =", "op = self.cigar_ops[self.cigar_i] \"\"\" https://samtools.github.io/hts-specs/SAMv1.pdf --------------------------------------------------- | Op | Code | Consume Read", "j < len(self.mutation_str) and ( # Both insertions ( (not self.mutation_str[j][2] and not", "# New position must be adjacent to the previous one and self.mutation_str[j][1] ==", "two identical letters. USEARCH generates CIGAR strings containing Ms rather than X's and", "pos, ref, alt)) continue # Check ahead for adjacent positions and the same", "1 # Actually, skip adding it if either the ref or the alt", "CIGAR operation op = self.cigar_ops[self.cigar_i] \"\"\" https://samtools.github.io/hts-specs/SAMv1.pdf --------------------------------------------------- | Op | Code |", "load_read(self): \"\"\"Load data in from the pysam.AlignedSegment object into Python \"\"\" # Nucleotide", "Start the read at the position it is mapped onto the reference #", "any SNPs at this position (self.reference_seq[self.ref_i] != \"X\") ): # Add substitution information", "if self.read.is_reverse: self.read_seq = reverse_complement(self.read_seq) # Don't try to do anything else if", "numpy as np import pandas as pd from collections import defaultdict from scripts.util", "# If we've reached the end of the CIGAR string, break out if", "self.reference_seq[self.ref_i], self.read_seq[self.read_i], ) ) self.read_i += 1 self.ref_i += 1 # Insertion or", "SNP-tolerant manner Modified and heavily trimmed down version of read_extractor.py (v0.1.0) from the", "at this position (self.reference_seq[self.ref_i] != \"X\") ): # Add substitution information to mutation", "1 # Deletion or Skip elif op == 2 or op == 3:", "know there's a mismatch if ( # Check for a mismatch OP code", "op_group in self.read.cigartuples: # First element of the tuple is the operation code", "Offset the position back to 1-indexed, starting at the genome start pos =", "* op_group[1]) # Reset the cigar index self.cigar_i = 0 # Start the", "9 \"\"\" self.cigar_ops = [] for op_group in self.read.cigartuples: # First element of", "self.read.reference_start # Start the read at the position it is mapped onto the", "4 | Yes | No | | H | 5 | No |", "0 def crawl_to(self, destination): \"\"\"Iterate (consume bases) through both the read and the", "query that do not appear in a local alignment. H BAM_CHARD_CLIP 5 Segment", "def crawl_to(self, destination): \"\"\"Iterate (consume bases) through both the read and the reference", "the read and the reference Use the CIGAR operations and other stats to", "CIGAR index self.cigar_i += 1 # END WHILE def get_dna_snps(self): \"\"\"Store list of", "then we have to check both the read # and the reference to", "| Consume Read | Consume Reference | --------------------------------------------------- | M | 0 |", "region from the reference S BAM_CSOFT_CLIP 4 Segment of the query sequence that", "insertions, the position is the position on the reference # after the insertion", "(gap in the query sequence). D BAM_CDEL 2 Deletion (gap in the target", "of the query sequences is given (field 10 in the SAM record). 
In", "regions from a pysam AlignedSegment \"\"\" RefSeq = \"\" def __init__(self, read): \"\"\"Build", "ref, alt) = self.mutation_str[i] # mut is a tuple: (Position, Ref, Alt) #", "pd from collections import defaultdict from scripts.util import translate, reverse_complement class ReadExtractor: \"\"\"Extract", "deletion information to mutation string self.mutation_str.append( ( self.read.query_name, self.ref_i, self.reference_seq[self.ref_i], \"\", ) )", "= self.cigar_ops[self.cigar_i] \"\"\" https://samtools.github.io/hts-specs/SAMv1.pdf --------------------------------------------------- | Op | Code | Consume Read |", "the number of operations # Create a new list [# of operations] long", "the base, then # ignore any SNPs at this position (self.reference_seq[self.ref_i] != \"X\")", "return everything\"\"\" # Travel to the end of the read # so that", "= \"\" def __init__(self, read): \"\"\"Build the extactor object for a read (pysam.AlignedSegment)", "specify segments at the start and/ or end of the query that do", "# Do nothing pass # Always iterate the CIGAR index self.cigar_i += 1", "list [# of operations] long and add it to the # master operations", "| | = | 7 | Yes | Yes | | X |", "Check for a mismatch OP code or a base mismatch for a #", "# Mutations will be individually serialized then joined by ';' later # to", "of the query that do not appear in the SAM record. P BAM_CPAD", "from a pysam AlignedSegment \"\"\" RefSeq = \"\" def __init__(self, read): \"\"\"Build the", "m in adj_muts]), ) ) # Skip ahead to the end of the", "Do nothing pass # Always iterate the CIGAR index self.cigar_i += 1 #", "padding (silent deletion from padded reference) = BAM_CEQUAL 7 Alignment column containing two", "the position it is mapped onto the reference # using read.query_alignment_start self.read_i =", "Ref, Alt) for now. # Mutations will be individually serialized then joined by", "In this case, H operations specify segments at the start and/or end of", "mutations i = j def process_all(self): \"\"\"Do everything, return everything\"\"\" # Travel to", "Both insertions ( (not self.mutation_str[j][2] and not ref) # Both deletions or (not", "and continue if ref and alt: i += 1 # Actually, skip adding", "= pos + 1 # If it's a SNP, then add and continue", "regions from an aligned segment, in a flexible and SNP-tolerant manner Modified and", "4: # Add insertion information to mutation string self.mutation_str.append( (self.read.query_name, self.ref_i, \"\", self.read_seq[self.read_i])", "ref) Parameters ---------- destination: int - Index on the reference of where we", "Add substitution information to mutation string self.mutation_str.append( ( self.read.query_name, self.ref_i, self.reference_seq[self.ref_i], self.read_seq[self.read_i], )", "\"aligned\" base (as if we did a multiple sequence alignment on the read", "on the same \"aligned\" base (as if we did a multiple sequence alignment", "both the read # and the reference to see if there's a mismatch", "Store it as a list of tuples, (Position, Ref, Alt) for now. 
#", "and ( # Both insertions ( (not self.mutation_str[j][2] and not ref) # Both", "pandas as pd from collections import defaultdict from scripts.util import translate, reverse_complement class", "op == 4: # Add insertion information to mutation string self.mutation_str.append( (self.read.query_name, self.ref_i,", "sequence and the quality scores if self.read.is_reverse: self.read_seq = reverse_complement(self.read_seq) # Don't try", "(pysam.AlignedSegment) or a pair of reads if using paired-end sequencing Parameters ---------- read:", "not ref) # Both deletions or (not self.mutation_str[j][3] and not alt) ) #", "generate them. B BAM_CBACK 9 \"\"\" self.cigar_ops = [] for op_group in self.read.cigartuples:", "Travel to the end of the read # so that we can collect", "variant as successfully extracted, # but not passing filters, will be stored in", "op == 2 or op == 3: # Add deletion information to mutation", "and/ or end of the query that do not appear in a local", "(not self.mutation_str[j][3] and not alt) ) # New position must be adjacent to", "# Both insertions ( (not self.mutation_str[j][2] and not ref) # Both deletions or", "fit our needs # Format: Position:Ref:Alt;... # Where position is relative to the", "serialized then joined by ';' later # to serialize into one big string", "Check ahead for adjacent positions and the same indel type j = i", "can read CIGAR strings using this operation, but does not generate them. B", "read: pysam.AlignedSegment \"\"\" self.read = read # Build our own mutation string to", "self.read_seq = self.read.get_forward_sequence() # If reverse complement, flip the sequence and the quality", "self.cigar_i = 0 # Start the reference at the position it is mapped", "and not alt) ) # New position must be adjacent to the previous", "needs # Format: Position:Ref:Alt;... # Where position is relative to the reference (0-indexed)", "the CIGAR and MD string don't fit our needs # Format: Position:Ref:Alt;... #", "ahead for adjacent positions and the same indel type j = i while", "+= 1 # Get adjacent indels adj_muts = self.mutation_str[i:j] # Combine bases, but", "does not appear in the alignment. This is used with hard clipping, where", "[] # Any invalidation errors that flag this variant as successfully extracted, #", "__init__(self, read): \"\"\"Build the extactor object for a read (pysam.AlignedSegment) or a pair", "5 | No | No | | P | 6 | No |", "padded reference) = BAM_CEQUAL 7 Alignment column containing two identical letters. USEARCH can", "RefSeq = \"\" def __init__(self, read): \"\"\"Build the extactor object for a read", "\"\"\"Store list of NT SNPs/indels\"\"\" # Join adjacent indels self.dna_snps = [] i", "';' later # to serialize into one big string self.mutation_str = [] #", "at the genome start pos = pos + 1 # If it's a", "multiple sequence alignment on the read and ref) Parameters ---------- destination: int -", "# Check ahead for adjacent positions and the same indel type j =", "== 7 or op == 8: # Check for SNPs # If the", "but does not generate them. 
X BAM_CDIFF 8 Alignment column containing a mismatch,", "self.mutation_str[i:j] # Combine bases, but keep first position and type self.dna_snps.append( ( query_name,", "--------------------------------------------------- | M | 0 | Yes | Yes | | I |", "!= \"X\") ): # Add substitution information to mutation string self.mutation_str.append( ( self.read.query_name,", "the pysam.AlignedSegment object into Python \"\"\" # Nucleotide sequence of the read self.read_seq", "of the query that do not appear in a local alignment. H BAM_CHARD_CLIP", "reference) = BAM_CEQUAL 7 Alignment column containing two identical letters. USEARCH can read", "mutation string to store mutational information # Since both the CIGAR and MD", "the read self.read_seq = self.read.get_forward_sequence() # If reverse complement, flip the sequence and", "sequencing Parameters ---------- read: pysam.AlignedSegment \"\"\" self.read = read # Build our own", "at the position it is mapped onto the read # using read.reference_start self.ref_i", "7 or op == 8: # Check for SNPs # If the OP", "end of the CIGAR string, break out if self.cigar_i >= len(self.cigar_ops) or self.read_i", "8 | Yes | Yes | | B | 9 | ? |", "| --------------------------------------------------- \"\"\" # MATCH - can be match or mismatch (SNP) if", "= [] # Any invalidation errors that flag this variant as successfully extracted,", "\"\", ) ) self.ref_i += 1 # Hard Clip, Padding else: # Do", "ref, alt)) continue # Check ahead for adjacent positions and the same indel", "3 | No | Yes | | S | 4 | Yes |", "---------- destination: int - Index on the reference of where we want to", "# Create a new list [# of operations] long and add it to", "continue if ref and alt: i += 1 # Actually, skip adding it", "code of 8, then we know there's a mismatch if ( # Check", "is mapped onto the reference # using read.query_alignment_start self.read_i = 0 def crawl_to(self,", "of 8, then we know there's a mismatch if ( # Check for", "\"X\") ): # Add substitution information to mutation string self.mutation_str.append( ( self.read.query_name, self.ref_i,", "Author: <NAME> - Vector Engineering Team (<EMAIL>) \"\"\" import numpy as np import", "sequence of the read self.read_seq = self.read.get_forward_sequence() # If reverse complement, flip the", "M | 0 | Yes | Yes | | I | 1 |", "in from the pysam.AlignedSegment object into Python \"\"\" # Nucleotide sequence of the", "is 0, then we have to check both the read # and the", "| No | No | | P | 6 | No | No", "reference # that was deleted # Store it as a list of tuples,", "mismatch (SNP) if op == 0 or op == 7 or op ==", "same \"aligned\" base (as if we did a multiple sequence alignment on the", "to 1-indexed, starting at the genome start pos = pos + 1 #", "two different letters. 
USEARCH can read CIGAR strings using this operation, but does", "| = | 7 | Yes | Yes | | X | 8", "Skip ahead to the end of the adjacent mutations i = j def", "| No | Yes | | N | 3 | No | Yes", "self.dna_snps = [] i = 0 while i < len(self.mutation_str): (query_name, pos, ref,", "as pd from collections import defaultdict from scripts.util import translate, reverse_complement class ReadExtractor:", "want to crawl to \"\"\" while self.ref_i < destination: # If we've reached", "operation op = self.cigar_ops[self.cigar_i] \"\"\" https://samtools.github.io/hts-specs/SAMv1.pdf --------------------------------------------------- | Op | Code | Consume", "read and the reference Use the CIGAR operations and other stats to stay", "the tuple is the operation code # Second element of the tuple is", "reference # using read.query_alignment_start self.read_i = 0 def crawl_to(self, destination): \"\"\"Iterate (consume bases)", "\"\"\"Build the extactor object for a read (pysam.AlignedSegment) or a pair of reads", "No | | H | 5 | No | No | | P", "destination: int - Index on the reference of where we want to crawl", "Get the reference sequence self.reference_seq = ReadExtractor.RefSeq \"\"\"Expand CIGAR tuples to a list", "it's a SNP, then add and continue if ref and alt: i +=", "sequence that does not appear in the alignment. This is used with hard", "but not passing filters, will be stored in this array # Later when", "to serialize into one big string self.mutation_str = [] # Any invalidation errors", "to mutation string self.mutation_str.append( (self.read.query_name, self.ref_i, \"\", self.read_seq[self.read_i]) ) self.read_i += 1 #", "Don't try to do anything else if this read is unmapped if self.read.is_unmapped:", "H | 5 | No | No | | P | 6 |", "python variables self.load_read() def load_read(self): \"\"\"Load data in from the pysam.AlignedSegment object into", "[\"A\", \"C\", \"G\", \"T\"]: continue self.dna_snps.append((query_name, pos, ref, alt)) continue # Check ahead", "elif op == 1 or op == 4: # Add insertion information to", "# master operations list self.cigar_ops.extend([op_group[0],] * op_group[1]) # Reset the cigar index self.cigar_i", "MD string don't fit our needs # Format: Position:Ref:Alt;... # Where position is", "# Skip ahead to the end of the adjacent mutations i = j", "array as a semicolon-delimited string self.invalid_errors = [] # Store SNPs self.dna_snps =", "- i)) ): j += 1 # Get adjacent indels adj_muts = self.mutation_str[i:j]", "this case, S operations specify segments at the start and/ or end of", "in [\"A\", \"C\", \"G\", \"T\"]: continue self.dna_snps.append((query_name, pos, ref, alt)) continue # Check", "the reference # after the insertion # For deletions, the position is the", "H BAM_CHARD_CLIP 5 Segment of the query sequence that does not appear in", "collections import defaultdict from scripts.util import translate, reverse_complement class ReadExtractor: \"\"\"Extract variable regions", "a mismatch # If bowtie2 gave us the OP code of 8, then", "class ReadExtractor: \"\"\"Extract variable regions from a pysam AlignedSegment \"\"\" RefSeq = \"\"", "Clip, Padding else: # Do nothing pass # Always iterate the CIGAR index", "position is relative to the reference (0-indexed) # For insertions, the position is", "BAM_CDEL 2 Deletion (gap in the target sequence). 
N BAM_CREF_SKIP 3 skipped region", "USEARCH can read CIGAR strings using this operation, but does not generate them.", "the reference sequence self.reference_seq = ReadExtractor.RefSeq \"\"\"Expand CIGAR tuples to a list of", "from an aligned segment, in a flexible and SNP-tolerant manner Modified and heavily", "ref) # Both deletions or (not self.mutation_str[j][3] and not alt) ) # New", "to see if there's a mismatch # If bowtie2 gave us the OP", "not generate them. B BAM_CBACK 9 \"\"\" self.cigar_ops = [] for op_group in", "the SAM record) . In this case, S operations specify segments at the", "6 | No | No | | = | 7 | Yes |", "BAM_CDIFF 8 Alignment column containing a mismatch, i.e. two different letters. USEARCH can", "or (not self.mutation_str[j][3] and not alt) ) # New position must be adjacent", "SNPs self.dna_snps = [] # Read data from the pysam.AlignedSegment object into python", "Mutations will be individually serialized then joined by ';' later # to serialize", "# This is useless data bloat and should be removed as # early", "| Yes | | I | 1 | Yes | No | |", "ignore any SNPs at this position (self.reference_seq[self.ref_i] != \"X\") ): # Add substitution", "possible if alt not in [\"A\", \"C\", \"G\", \"T\"]: continue self.dna_snps.append((query_name, pos, ref,", "a read (pysam.AlignedSegment) or a pair of reads if using paired-end sequencing Parameters", "read CIGAR strings using this operation, but does not generate them. X BAM_CDIFF", "of the tuple is the operation code # Second element of the tuple", "end of the read # so that we can collect additional mutations (if", "if using paired-end sequencing Parameters ---------- read: pysam.AlignedSegment \"\"\" self.read = read #", "reference # after the insertion # For deletions, the position is the position", "the same indel type j = i while j < len(self.mutation_str) and (", "mutation string self.mutation_str.append( (self.read.query_name, self.ref_i, \"\", self.read_seq[self.read_i]) ) self.read_i += 1 # Deletion", "end of the query that do not appear in the SAM record. P", "then add and continue if ref and alt: i += 1 # Actually,", "number of operations # Create a new list [# of operations] long and", "this array # Later when writing to disk we'll serialize this array as", "and self.mutation_str[j][1] == int(pos - 1 + (j - i)) ): j +=", "it if either the ref or the alt # is an ambiguous base", "= 0 # Start the reference at the position it is mapped onto", "appear in the alignment. This is used with hard clipping, where only the", "stay on the same \"aligned\" base (as if we did a multiple sequence", "using read.reference_start self.ref_i = self.read.reference_start # Start the read at the position it", "or Soft Clip elif op == 1 or op == 4: # Add", "Get adjacent indels adj_muts = self.mutation_str[i:j] # Combine bases, but keep first position", "everything, return everything\"\"\" # Travel to the end of the read # so", "code is 0, then we have to check both the read # and", "base mismatch for a # generic 0 OP code ( (op == 8)", "position is the position on the reference # after the insertion # For", "column containing two letters). This could contain two different letters (mismatch) or two", "containing two identical letters. 
USEARCH can read CIGAR strings using this operation, but", "read (query) https://pysam.readthedocs.io/en/latest/api.html#pysam.AlignedSegment.cigartuples https://drive5.com/usearch/manual/cigar.html https://samtools.github.io/hts-specs/SAMv1.pdf Op Code Description ----------------------------------------------------------------------------------------- M BAM_CMATCH 0 Match", "S operations specify segments at the start and/ or end of the query", "pos, \"\".join([m[2] for m in adj_muts]), \"\".join([m[3] for m in adj_muts]), ) )", "Position:Ref:Alt;... # Where position is relative to the reference (0-indexed) # For insertions,", "of the read # so that we can collect additional mutations (if they", "Yes | | B | 9 | ? | ? | --------------------------------------------------- \"\"\"", "extracted, # but not passing filters, will be stored in this array #", "| No | No | | = | 7 | Yes | Yes", "CIGAR operations and other stats to stay on the same \"aligned\" base (as", "[] i = 0 while i < len(self.mutation_str): (query_name, pos, ref, alt) =", "relative to the reference (0-indexed) # For insertions, the position is the position", "to the reference (0-indexed) # For insertions, the position is the position on", "where we want to crawl to \"\"\" while self.ref_i < destination: # If", "using this operation, but does not generate them. X BAM_CDIFF 8 Alignment column", "to check both the read # and the reference to see if there's", "crawl to \"\"\" while self.ref_i < destination: # If we've reached the end", "op == 0 and self.read_seq[self.read_i] != self.reference_seq[self.ref_i] ) ) and # If the", "in adj_muts]), \"\".join([m[3] for m in adj_muts]), ) ) # Skip ahead to", "0 OP code ( (op == 8) or ( op == 0 and", ") self.read_i += 1 # Deletion or Skip elif op == 2 or", "information to mutation string self.mutation_str.append( (self.read.query_name, self.ref_i, \"\", self.read_seq[self.read_i]) ) self.read_i += 1", "query sequence is given (field 10 in the SAM record) . In this", "break out if self.cigar_i >= len(self.cigar_ops) or self.read_i >= len(self.read_seq): return # Grab", "complement, flip the sequence and the quality scores if self.read.is_reverse: self.read_seq = reverse_complement(self.read_seq)", "----------------------------------------------------------------------------------------- M BAM_CMATCH 0 Match (alignment column containing two letters). This could contain", "reference (0-indexed) # For insertions, the position is the position on the reference", "self.read_i >= len(self.read_seq): return # Grab the current CIGAR operation op = self.cigar_ops[self.cigar_i]", "as # early as possible if alt not in [\"A\", \"C\", \"G\", \"T\"]:", "return # Grab the current CIGAR operation op = self.cigar_ops[self.cigar_i] \"\"\" https://samtools.github.io/hts-specs/SAMv1.pdf ---------------------------------------------------", "removed as # early as possible if alt not in [\"A\", \"C\", \"G\",", "start pos = pos + 1 # If it's a SNP, then add", ") and # If the reference has an X as the base, then", "can read CIGAR strings using this operation, but does not generate them. 
X", "be adjacent to the previous one and self.mutation_str[j][1] == int(pos - 1 +", "mutational information # Since both the CIGAR and MD string don't fit our", "if this read is unmapped if self.read.is_unmapped: return # Get the reference sequence", "If bowtie2 gave us the OP code of 8, then we know there's", "Store SNPs self.dna_snps = [] # Read data from the pysam.AlignedSegment object into", "CIGAR and MD string don't fit our needs # Format: Position:Ref:Alt;... # Where", "our own mutation string to store mutational information # Since both the CIGAR", "is an ambiguous base (N) # This is useless data bloat and should", "list of CIGAR operations on the read (query) https://pysam.readthedocs.io/en/latest/api.html#pysam.AlignedSegment.cigartuples https://drive5.com/usearch/manual/cigar.html https://samtools.github.io/hts-specs/SAMv1.pdf Op Code", "| 2 | No | Yes | | N | 3 | No", "of NT SNPs/indels\"\"\" # Join adjacent indels self.dna_snps = [] i = 0", "1 # END WHILE def get_dna_snps(self): \"\"\"Store list of NT SNPs/indels\"\"\" # Join", "extactor object for a read (pysam.AlignedSegment) or a pair of reads if using", "be match or mismatch (SNP) if op == 0 or op == 7", "crawl_to(self, destination): \"\"\"Iterate (consume bases) through both the read and the reference Use", "10 in the SAM record). In this case, H operations specify segments at", "is mapped onto the read # using read.reference_start self.ref_i = self.read.reference_start # Start", "3 skipped region from the reference S BAM_CSOFT_CLIP 4 Segment of the query", "and not ref) # Both deletions or (not self.mutation_str[j][3] and not alt) )", "1 # Insertion or Soft Clip elif op == 1 or op ==", "from the reference S BAM_CSOFT_CLIP 4 Segment of the query sequence that does", "the # master operations list self.cigar_ops.extend([op_group[0],] * op_group[1]) # Reset the cigar index", "where only the aligned segment of the query sequences is given (field 10", "information to mutation string self.mutation_str.append( ( self.read.query_name, self.ref_i, self.reference_seq[self.ref_i], self.read_seq[self.read_i], ) ) self.read_i", "object for a read (pysam.AlignedSegment) or a pair of reads if using paired-end", "and other stats to stay on the same \"aligned\" base (as if we", "position on the reference # that was deleted # Store it as a", "a new list [# of operations] long and add it to the #", "pysam AlignedSegment \"\"\" RefSeq = \"\" def __init__(self, read): \"\"\"Build the extactor object", "reads if using paired-end sequencing Parameters ---------- read: pysam.AlignedSegment \"\"\" self.read = read", "adj_muts = self.mutation_str[i:j] # Combine bases, but keep first position and type self.dna_snps.append(", "process_all(self): \"\"\"Do everything, return everything\"\"\" # Travel to the end of the read", "+= 1 # Hard Clip, Padding else: # Do nothing pass # Always", "Ref, Alt) # Offset the position back to 1-indexed, starting at the genome", "if self.read.is_unmapped: return # Get the reference sequence self.reference_seq = ReadExtractor.RefSeq \"\"\"Expand CIGAR", "does not generate them. 
X BAM_CDIFF 8 Alignment column containing a mismatch, i.e.", "\"\" def __init__(self, read): \"\"\"Build the extactor object for a read (pysam.AlignedSegment) or", "while i < len(self.mutation_str): (query_name, pos, ref, alt) = self.mutation_str[i] # mut is", "bases, but keep first position and type self.dna_snps.append( ( query_name, pos, \"\".join([m[2] for", "N BAM_CREF_SKIP 3 skipped region from the reference S BAM_CSOFT_CLIP 4 Segment of", "both the CIGAR and MD string don't fit our needs # Format: Position:Ref:Alt;...", "continue self.dna_snps.append((query_name, pos, ref, alt)) continue # Check ahead for adjacent positions and", "\"\"\"Iterate (consume bases) through both the read and the reference Use the CIGAR", "we know there's a mismatch if ( # Check for a mismatch OP", "| S | 4 | Yes | No | | H | 5", "Yes | Yes | | B | 9 | ? | ? |", "| No | | D | 2 | No | Yes | |", "it to the # master operations list self.cigar_ops.extend([op_group[0],] * op_group[1]) # Reset the", "additional mutations (if they exist) # Don't throw an error once we reach", "then # ignore any SNPs at this position (self.reference_seq[self.ref_i] != \"X\") ): #", "be individually serialized then joined by ';' later # to serialize into one", "the insertion # For deletions, the position is the position on the reference", "strings using this operation, but does not generate them. X BAM_CDIFF 8 Alignment", "not in [\"A\", \"C\", \"G\", \"T\"]: continue self.dna_snps.append((query_name, pos, ref, alt)) continue #", "not appear in the SAM record. P BAM_CPAD 6 padding (silent deletion from", "the alt # is an ambiguous base (N) # This is useless data", "| D | 2 | No | Yes | | N | 3", "# Store it as a list of tuples, (Position, Ref, Alt) for now.", "operation, but does not generate them. X BAM_CDIFF 8 Alignment column containing a", "not passing filters, will be stored in this array # Later when writing", "self.reference_seq = ReadExtractor.RefSeq \"\"\"Expand CIGAR tuples to a list of CIGAR operations on", "self.ref_i = self.read.reference_start # Start the read at the position it is mapped", "bowtie2 gave us the OP code of 8, then we know there's a", "op == 1 or op == 4: # Add insertion information to mutation", "= read # Build our own mutation string to store mutational information #", "useless data bloat and should be removed as # early as possible if", "different letters. USEARCH can read CIGAR strings using this operation, but does not", "(field 10 in the SAM record) . In this case, S operations specify", "identical letters. 
USEARCH generates CIGAR strings containing Ms rather than X's and ='s", "self.reference_seq[self.ref_i], \"\", ) ) self.ref_i += 1 # Hard Clip, Padding else: #", "len(self.mutation_str): (query_name, pos, ref, alt) = self.mutation_str[i] # mut is a tuple: (Position,", "the read and ref) Parameters ---------- destination: int - Index on the reference", "read.query_alignment_start self.read_i = 0 def crawl_to(self, destination): \"\"\"Iterate (consume bases) through both the", "mutation string self.mutation_str.append( ( self.read.query_name, self.ref_i, self.reference_seq[self.ref_i], \"\", ) ) self.ref_i += 1", "adj_muts]), \"\".join([m[3] for m in adj_muts]), ) ) # Skip ahead to the", "using read.query_alignment_start self.read_i = 0 def crawl_to(self, destination): \"\"\"Iterate (consume bases) through both", "(query) https://pysam.readthedocs.io/en/latest/api.html#pysam.AlignedSegment.cigartuples https://drive5.com/usearch/manual/cigar.html https://samtools.github.io/hts-specs/SAMv1.pdf Op Code Description ----------------------------------------------------------------------------------------- M BAM_CMATCH 0 Match (alignment", ") ) self.ref_i += 1 # Hard Clip, Padding else: # Do nothing", "| 3 | No | Yes | | S | 4 | Yes", "ReadExtractor: \"\"\"Extract variable regions from a pysam AlignedSegment \"\"\" RefSeq = \"\" def", "= self.read.reference_start # Start the read at the position it is mapped onto", "and alt: i += 1 # Actually, skip adding it if either the", "\"\"\"Extract variable regions from a pysam AlignedSegment \"\"\" RefSeq = \"\" def __init__(self,", "while j < len(self.mutation_str) and ( # Both insertions ( (not self.mutation_str[j][2] and", "to \"\"\" while self.ref_i < destination: # If we've reached the end of", "cigar index self.cigar_i = 0 # Start the reference at the position it", "containing Ms rather than X's and ='s (see below). I BAM_CINS 1 Insertion", "# Second element of the tuple is the number of operations # Create", "Join adjacent indels self.dna_snps = [] i = 0 while i < len(self.mutation_str):", "a # generic 0 OP code ( (op == 8) or ( op", "of operations # Create a new list [# of operations] long and add", "joined by ';' later # to serialize into one big string self.mutation_str =", "adjacent positions and the same indel type j = i while j <", "in a local alignment. H BAM_CHARD_CLIP 5 Segment of the query sequence that", "if we did a multiple sequence alignment on the read and ref) Parameters", "deletions or (not self.mutation_str[j][3] and not alt) ) # New position must be", "is the position on the reference # after the insertion # For deletions,", "out if self.cigar_i >= len(self.cigar_ops) or self.read_i >= len(self.read_seq): return # Grab the", "self.reference_seq[self.ref_i] ) ) and # If the reference has an X as the", "from the pysam.AlignedSegment object into Python \"\"\" # Nucleotide sequence of the read", "Alignment column containing a mismatch, i.e. two different letters. 
USEARCH can read CIGAR", "as the base, then # ignore any SNPs at this position (self.reference_seq[self.ref_i] !=", "did a multiple sequence alignment on the read and ref) Parameters ---------- destination:", ">= len(self.read_seq): return # Grab the current CIGAR operation op = self.cigar_ops[self.cigar_i] \"\"\"", "self.ref_i, self.reference_seq[self.ref_i], \"\", ) ) self.ref_i += 1 # Hard Clip, Padding else:", "and SNP-tolerant manner Modified and heavily trimmed down version of read_extractor.py (v0.1.0) from", "= self.mutation_str[i] # mut is a tuple: (Position, Ref, Alt) # Offset the", "Actually, skip adding it if either the ref or the alt # is", "we can collect additional mutations (if they exist) # Don't throw an error", "position and type self.dna_snps.append( ( query_name, pos, \"\".join([m[2] for m in adj_muts]), \"\".join([m[3]", "Read data from the pysam.AlignedSegment object into python variables self.load_read() def load_read(self): \"\"\"Load", "string self.invalid_errors = [] # Store SNPs self.dna_snps = [] # Read data", "| Yes | No | | H | 5 | No | No", "to the # master operations list self.cigar_ops.extend([op_group[0],] * op_group[1]) # Reset the cigar", "or end of the query that do not appear in a local alignment.", "if either the ref or the alt # is an ambiguous base (N)", "and MD string don't fit our needs # Format: Position:Ref:Alt;... # Where position", "len(self.mutation_str) and ( # Both insertions ( (not self.mutation_str[j][2] and not ref) #", "semicolon-delimited string self.invalid_errors = [] # Store SNPs self.dna_snps = [] # Read", "# generic 0 OP code ( (op == 8) or ( op ==", "on the read (query) https://pysam.readthedocs.io/en/latest/api.html#pysam.AlignedSegment.cigartuples https://drive5.com/usearch/manual/cigar.html https://samtools.github.io/hts-specs/SAMv1.pdf Op Code Description ----------------------------------------------------------------------------------------- M BAM_CMATCH", "[] for op_group in self.read.cigartuples: # First element of the tuple is the", "or a pair of reads if using paired-end sequencing Parameters ---------- read: pysam.AlignedSegment", "has an X as the base, then # ignore any SNPs at this", "previous one and self.mutation_str[j][1] == int(pos - 1 + (j - i)) ):", "do not appear in the SAM record. P BAM_CPAD 6 padding (silent deletion", "op == 8: # Check for SNPs # If the OP code is", "containing a mismatch, i.e. two different letters. USEARCH can read CIGAR strings using", "quality scores if self.read.is_reverse: self.read_seq = reverse_complement(self.read_seq) # Don't try to do anything", "== int(pos - 1 + (j - i)) ): j += 1 #", "X | 8 | Yes | Yes | | B | 9 |", "self.read.is_unmapped: return # Get the reference sequence self.reference_seq = ReadExtractor.RefSeq \"\"\"Expand CIGAR tuples", "): # Add substitution information to mutation string self.mutation_str.append( ( self.read.query_name, self.ref_i, self.reference_seq[self.ref_i],", "len(self.cigar_ops) or self.read_i >= len(self.read_seq): return # Grab the current CIGAR operation op", "as possible if alt not in [\"A\", \"C\", \"G\", \"T\"]: continue self.dna_snps.append((query_name, pos,", "OP code or a base mismatch for a # generic 0 OP code", "or mismatch (SNP) if op == 0 or op == 7 or op", "(field 10 in the SAM record). In this case, H operations specify segments", "this array as a semicolon-delimited string self.invalid_errors = [] # Store SNPs self.dna_snps", "for now. # Mutations will be individually serialized then joined by ';' later", "the alignment. 
This is used with soft clipping, where the full-length query sequence", "\"\"\"Do everything, return everything\"\"\" # Travel to the end of the read #", "letters. USEARCH generates CIGAR strings containing Ms rather than X's and ='s (see", "Both deletions or (not self.mutation_str[j][3] and not alt) ) # New position must", ") ) self.read_i += 1 self.ref_i += 1 # Insertion or Soft Clip", "but does not generate them. B BAM_CBACK 9 \"\"\" self.cigar_ops = [] for", "Deletion or Skip elif op == 2 or op == 3: # Add", "to a list of CIGAR operations on the read (query) https://pysam.readthedocs.io/en/latest/api.html#pysam.AlignedSegment.cigartuples https://drive5.com/usearch/manual/cigar.html https://samtools.github.io/hts-specs/SAMv1.pdf", "record). In this case, H operations specify segments at the start and/or end", "the SAM record). In this case, H operations specify segments at the start", "by ';' later # to serialize into one big string self.mutation_str = []", "sequence alignment on the read and ref) Parameters ---------- destination: int - Index", "--------------------------------------------------- | Op | Code | Consume Read | Consume Reference | ---------------------------------------------------", "there's a mismatch # If bowtie2 gave us the OP code of 8,", "= [] # Read data from the pysam.AlignedSegment object into python variables self.load_read()", "Team (<EMAIL>) \"\"\" import numpy as np import pandas as pd from collections", "\"\".join([m[2] for m in adj_muts]), \"\".join([m[3] for m in adj_muts]), ) ) #", "a SNP, then add and continue if ref and alt: i += 1", "invalidation errors that flag this variant as successfully extracted, # but not passing", "the end of the adjacent mutations i = j def process_all(self): \"\"\"Do everything,", "the reference of where we want to crawl to \"\"\" while self.ref_i <", "[] # Store SNPs self.dna_snps = [] # Read data from the pysam.AlignedSegment", "the reference # using read.query_alignment_start self.read_i = 0 def crawl_to(self, destination): \"\"\"Iterate (consume", "pair of reads if using paired-end sequencing Parameters ---------- read: pysam.AlignedSegment \"\"\" self.read", "as np import pandas as pd from collections import defaultdict from scripts.util import", "| 0 | Yes | Yes | | I | 1 | Yes", "in the alignment. This is used with hard clipping, where only the aligned", "two identical letters. USEARCH can read CIGAR strings using this operation, but does", "read at the position it is mapped onto the reference # using read.query_alignment_start", "mismatch if ( # Check for a mismatch OP code or a base", "This is used with hard clipping, where only the aligned segment of the" ]
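The fragments in the list above repeatedly describe one preparatory step of the read extractor: expanding pysam-style CIGAR tuples into a flat list with one operation code per consumed base before walking the read and reference in parallel. A minimal standalone sketch of just that expansion step follows; the function name expand_cigar and the sample tuples are illustrative only and do not come from the original file.

# Minimal sketch of the "expand CIGAR tuples to a list of CIGAR operations" step
# described in the fragments above. Names and sample values are illustrative.
def expand_cigar(cigartuples):
    """Expand (op_code, run_length) tuples into one op code per base."""
    ops = []
    for op, length in cigartuples:  # first element: op code, second: run length
        ops.extend([op] * length)
    return ops

# 3 matches (0), 2 insertions (1), 1 deletion (2) -> one entry per operation
assert expand_cigar([(0, 3), (1, 2), (2, 1)]) == [0, 0, 0, 1, 1, 2]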
[ "import uniform from math import hypot n = int(input('input n:')) m = 0", "= int(input('input n:')) m = 0 for i in range(n): d = hypot(uniform(0,1),uniform(0,1))", "for i in range(n): d = hypot(uniform(0,1),uniform(0,1)) if d < 1: m+=1 print(float(m*4", "import hypot n = int(input('input n:')) m = 0 for i in range(n):", "from random import uniform from math import hypot n = int(input('input n:')) m", "0 for i in range(n): d = hypot(uniform(0,1),uniform(0,1)) if d < 1: m+=1", "n:')) m = 0 for i in range(n): d = hypot(uniform(0,1),uniform(0,1)) if d", "random import uniform from math import hypot n = int(input('input n:')) m =", "= 0 for i in range(n): d = hypot(uniform(0,1),uniform(0,1)) if d < 1:", "i in range(n): d = hypot(uniform(0,1),uniform(0,1)) if d < 1: m+=1 print(float(m*4 /n))", "int(input('input n:')) m = 0 for i in range(n): d = hypot(uniform(0,1),uniform(0,1)) if", "n = int(input('input n:')) m = 0 for i in range(n): d =", "math import hypot n = int(input('input n:')) m = 0 for i in", "hypot n = int(input('input n:')) m = 0 for i in range(n): d", "m = 0 for i in range(n): d = hypot(uniform(0,1),uniform(0,1)) if d <", "uniform from math import hypot n = int(input('input n:')) m = 0 for", "from math import hypot n = int(input('input n:')) m = 0 for i" ]
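For readability, the short script that the preceding fragment list overlaps can be reassembled as a single runnable snippet; the identifiers and logic are taken directly from the fragments, while line breaks and spacing are inferred. It estimates pi by sampling points in the unit square and counting those that land inside the quarter circle.

from random import uniform
from math import hypot

n = int(input('input n:'))
m = 0
for i in range(n):
    d = hypot(uniform(0, 1), uniform(0, 1))  # distance of a random point from the origin
    if d < 1:
        m += 1  # point falls inside the quarter circle of radius 1
print(float(m * 4 / n))  # fraction inside * 4 approximates pi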
[ "ContextPath(\"$$.Map.Item.Index\") assert ContextPath().Execution.Input.foo.bar.baz == ContextPath(\"$$.Execution.Input.foo.bar.baz\") @pytest.mark.parametrize( \"path\", (pytest.param(\"\", id=\"empty path\"), pytest.param(\"$.Execution\", id=\"valid child", "id=\"empty path\"), pytest.param(\"$.Execution\", id=\"valid child but invalid root\")) + tuple(pytest.param(val + \".foo\", id=\"valid", "_VALID_STATIC_CONTEXT_PATHS = ( \"$$\", \"$$.Execution\", \"$$.Execution.Id\", \"$$.Execution.StartTime\", \"$$.State\", \"$$.State.EnteredTime\", \"$$.State.Name\", \"$$.State.RetryCount\", \"$$.StateMachine\", \"$$.StateMachine.Id\",", "_VALID_CONTEXT_PATHS_WITH_INPUT) def test_contextpath_valid(path): ContextPath(path=path) @pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT) def test_contextpath_getattr_valid(path): expected = ContextPath(path=path) names =", ") @pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT) def test_contextpath_valid(path): ContextPath(path=path) @pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT) def test_contextpath_getattr_valid(path): expected = ContextPath(path=path)", "real testing is via ``test_contextpath_getattr_valid``. This test is just to show a more", "child but invalid root\")) + tuple(pytest.param(val + \".foo\", id=\"valid prefix but invalid child\")", "is just to show a more human-readable form. \"\"\" assert ContextPath() == ContextPath(\"$$\")", "def test_contextpath_getattr_readable(): \"\"\"The real testing is via ``test_contextpath_getattr_valid``. This test is just to", "val in _VALID_STATIC_CONTEXT_PATHS), ) def test_contextpath_invalid(path): with pytest.raises(ValueError) as excinfo: ContextPath(path=path) excinfo.match(\"Invalid Context", "assert ContextPath().Execution == ContextPath(\"$$.Execution\") assert ContextPath().Map.Item.Index == ContextPath(\"$$.Map.Item.Index\") assert ContextPath().Execution.Input.foo.bar.baz == ContextPath(\"$$.Execution.Input.foo.bar.baz\") @pytest.mark.parametrize(", "import ContextPath, Parameters pytestmark = [pytest.mark.local, pytest.mark.functional] _VALID_STATIC_CONTEXT_PATHS = ( \"$$\", \"$$.Execution\", \"$$.Execution.Id\",", "\"$$.Map.Item.Value\", \"$$.Map.Item.Value.foo\", \"$$.Map.Item.Value.foo.bar\", \"$$.Map.Item.Value.foo.bar.baz\", ) @pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT) def test_contextpath_valid(path): ContextPath(path=path) @pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT) def", "\"$$.Execution.StartTime\", \"$$.State\", \"$$.State.EnteredTime\", \"$$.State.Name\", \"$$.State.RetryCount\", \"$$.StateMachine\", \"$$.StateMachine.Id\", \"$$.Task\", \"$$.Task.Token\", \"$$.Map\", \"$$.Map.Item\", \"$$.Map.Item.Index\", )", "testing is via ``test_contextpath_getattr_valid``. 
This test is just to show a more human-readable", "_VALID_STATIC_CONTEXT_PATHS + ( \"$$.Execution.Input\", \"$$.Execution.Input.foo\", \"$$.Execution.Input.foo.bar\", \"$$.Execution.Input.foo.bar.baz\", \"$$.Map.Item.Value\", \"$$.Map.Item.Value.foo\", \"$$.Map.Item.Value.foo.bar\", \"$$.Map.Item.Value.foo.bar.baz\", ) @pytest.mark.parametrize(\"path\",", "\"$$.Execution.Input.foo.bar.baz\", \"$$.Map.Item.Value\", \"$$.Map.Item.Value.foo\", \"$$.Map.Item.Value.foo.bar\", \"$$.Map.Item.Value.foo.bar.baz\", ) @pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT) def test_contextpath_valid(path): ContextPath(path=path) @pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT)", "getattr(test, name) assert test == expected def test_contextpath_getattr_readable(): \"\"\"The real testing is via", "name) assert test == expected def test_contextpath_getattr_readable(): \"\"\"The real testing is via ``test_contextpath_getattr_valid``.", "assert ContextPath().Execution.Input.foo.bar.baz == ContextPath(\"$$.Execution.Input.foo.bar.baz\") @pytest.mark.parametrize( \"path\", (pytest.param(\"\", id=\"empty path\"), pytest.param(\"$.Execution\", id=\"valid child but", "in names: test = getattr(test, name) assert test == expected def test_contextpath_getattr_readable(): \"\"\"The", "_VALID_CONTEXT_PATHS_WITH_INPUT) def test_contextpath_getattr_valid(path): expected = ContextPath(path=path) names = path.split(\".\")[1:] test = ContextPath() for", "\"$$.Task.Token\", \"$$.Map\", \"$$.Map.Item\", \"$$.Map.Item.Index\", ) _VALID_CONTEXT_PATHS_WITH_INPUT = _VALID_STATIC_CONTEXT_PATHS + ( \"$$.Execution.Input\", \"$$.Execution.Input.foo\", \"$$.Execution.Input.foo.bar\",", "Context Path\") def test_parameters_repr(): test = Parameters(a=\"A\", b=3, c=True) assert repr(test) == \"Parameters(a='A',", "test_contextpath_getattr_valid(path): expected = ContextPath(path=path) names = path.split(\".\")[1:] test = ContextPath() for name in", "\"$$.Execution.Input.foo\", \"$$.Execution.Input.foo.bar\", \"$$.Execution.Input.foo.bar.baz\", \"$$.Map.Item.Value\", \"$$.Map.Item.Value.foo\", \"$$.Map.Item.Value.foo.bar\", \"$$.Map.Item.Value.foo.bar.baz\", ) @pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT) def test_contextpath_valid(path): ContextPath(path=path)", "test_contextpath_invalid(path): with pytest.raises(ValueError) as excinfo: ContextPath(path=path) excinfo.match(\"Invalid Context Path\") def test_parameters_repr(): test =", "``test_contextpath_getattr_valid``. This test is just to show a more human-readable form. \"\"\" assert", "ContextPath(\"$$.Execution\") assert ContextPath().Map.Item.Index == ContextPath(\"$$.Map.Item.Index\") assert ContextPath().Execution.Input.foo.bar.baz == ContextPath(\"$$.Execution.Input.foo.bar.baz\") @pytest.mark.parametrize( \"path\", (pytest.param(\"\", id=\"empty", "def test_parameters_repr(): test = Parameters(a=\"A\", b=3, c=True) assert repr(test) == \"Parameters(a='A', b=3, c=True)\"", "just to show a more human-readable form. 
\"\"\" assert ContextPath() == ContextPath(\"$$\") assert", "= path.split(\".\")[1:] test = ContextPath() for name in names: test = getattr(test, name)", "\"$$.Execution\", \"$$.Execution.Id\", \"$$.Execution.StartTime\", \"$$.State\", \"$$.State.EnteredTime\", \"$$.State.Name\", \"$$.State.RetryCount\", \"$$.StateMachine\", \"$$.StateMachine.Id\", \"$$.Task\", \"$$.Task.Token\", \"$$.Map\", \"$$.Map.Item\",", "ContextPath().Map.Item.Index == ContextPath(\"$$.Map.Item.Index\") assert ContextPath().Execution.Input.foo.bar.baz == ContextPath(\"$$.Execution.Input.foo.bar.baz\") @pytest.mark.parametrize( \"path\", (pytest.param(\"\", id=\"empty path\"), pytest.param(\"$.Execution\",", "is via ``test_contextpath_getattr_valid``. This test is just to show a more human-readable form.", "== ContextPath(\"$$.Execution.Input.foo.bar.baz\") @pytest.mark.parametrize( \"path\", (pytest.param(\"\", id=\"empty path\"), pytest.param(\"$.Execution\", id=\"valid child but invalid root\"))", "assert ContextPath() == ContextPath(\"$$\") assert ContextPath().Execution == ContextPath(\"$$.Execution\") assert ContextPath().Map.Item.Index == ContextPath(\"$$.Map.Item.Index\") assert", "+ tuple(pytest.param(val + \".foo\", id=\"valid prefix but invalid child\") for val in _VALID_STATIC_CONTEXT_PATHS),", "ContextPath().Execution.Input.foo.bar.baz == ContextPath(\"$$.Execution.Input.foo.bar.baz\") @pytest.mark.parametrize( \"path\", (pytest.param(\"\", id=\"empty path\"), pytest.param(\"$.Execution\", id=\"valid child but invalid", "form. \"\"\" assert ContextPath() == ContextPath(\"$$\") assert ContextPath().Execution == ContextPath(\"$$.Execution\") assert ContextPath().Map.Item.Index ==", "\"$$.Map.Item.Index\", ) _VALID_CONTEXT_PATHS_WITH_INPUT = _VALID_STATIC_CONTEXT_PATHS + ( \"$$.Execution.Input\", \"$$.Execution.Input.foo\", \"$$.Execution.Input.foo.bar\", \"$$.Execution.Input.foo.bar.baz\", \"$$.Map.Item.Value\", \"$$.Map.Item.Value.foo\",", "\"$$.Map\", \"$$.Map.Item\", \"$$.Map.Item.Index\", ) _VALID_CONTEXT_PATHS_WITH_INPUT = _VALID_STATIC_CONTEXT_PATHS + ( \"$$.Execution.Input\", \"$$.Execution.Input.foo\", \"$$.Execution.Input.foo.bar\", \"$$.Execution.Input.foo.bar.baz\",", "import pytest from rhodes.structures import ContextPath, Parameters pytestmark = [pytest.mark.local, pytest.mark.functional] _VALID_STATIC_CONTEXT_PATHS =", ") def test_contextpath_invalid(path): with pytest.raises(ValueError) as excinfo: ContextPath(path=path) excinfo.match(\"Invalid Context Path\") def test_parameters_repr():", "\"$$.Map.Item.Value.foo.bar\", \"$$.Map.Item.Value.foo.bar.baz\", ) @pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT) def test_contextpath_valid(path): ContextPath(path=path) @pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT) def test_contextpath_getattr_valid(path): expected", "Parameters pytestmark = [pytest.mark.local, pytest.mark.functional] _VALID_STATIC_CONTEXT_PATHS = ( \"$$\", \"$$.Execution\", \"$$.Execution.Id\", \"$$.Execution.StartTime\", \"$$.State\",", "name in names: test = getattr(test, name) assert test == expected def test_contextpath_getattr_readable():", "child\") for val in _VALID_STATIC_CONTEXT_PATHS), ) def test_contextpath_invalid(path): with pytest.raises(ValueError) as excinfo: ContextPath(path=path)", "\"$$\", \"$$.Execution\", \"$$.Execution.Id\", \"$$.Execution.StartTime\", \"$$.State\", \"$$.State.EnteredTime\", \"$$.State.Name\", \"$$.State.RetryCount\", \"$$.StateMachine\", \"$$.StateMachine.Id\", \"$$.Task\", \"$$.Task.Token\", 
\"$$.Map\",", "more human-readable form. \"\"\" assert ContextPath() == ContextPath(\"$$\") assert ContextPath().Execution == ContextPath(\"$$.Execution\") assert", "pytest.param(\"$.Execution\", id=\"valid child but invalid root\")) + tuple(pytest.param(val + \".foo\", id=\"valid prefix but", "names: test = getattr(test, name) assert test == expected def test_contextpath_getattr_readable(): \"\"\"The real", "Path\") def test_parameters_repr(): test = Parameters(a=\"A\", b=3, c=True) assert repr(test) == \"Parameters(a='A', b=3,", "\"$$.Execution.Input.foo.bar\", \"$$.Execution.Input.foo.bar.baz\", \"$$.Map.Item.Value\", \"$$.Map.Item.Value.foo\", \"$$.Map.Item.Value.foo.bar\", \"$$.Map.Item.Value.foo.bar.baz\", ) @pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT) def test_contextpath_valid(path): ContextPath(path=path) @pytest.mark.parametrize(\"path\",", "ContextPath() == ContextPath(\"$$\") assert ContextPath().Execution == ContextPath(\"$$.Execution\") assert ContextPath().Map.Item.Index == ContextPath(\"$$.Map.Item.Index\") assert ContextPath().Execution.Input.foo.bar.baz", "\"path\", (pytest.param(\"\", id=\"empty path\"), pytest.param(\"$.Execution\", id=\"valid child but invalid root\")) + tuple(pytest.param(val +", "via ``test_contextpath_getattr_valid``. This test is just to show a more human-readable form. \"\"\"", "+ ( \"$$.Execution.Input\", \"$$.Execution.Input.foo\", \"$$.Execution.Input.foo.bar\", \"$$.Execution.Input.foo.bar.baz\", \"$$.Map.Item.Value\", \"$$.Map.Item.Value.foo\", \"$$.Map.Item.Value.foo.bar\", \"$$.Map.Item.Value.foo.bar.baz\", ) @pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT)", "show a more human-readable form. \"\"\" assert ContextPath() == ContextPath(\"$$\") assert ContextPath().Execution ==", "ContextPath().Execution == ContextPath(\"$$.Execution\") assert ContextPath().Map.Item.Index == ContextPath(\"$$.Map.Item.Index\") assert ContextPath().Execution.Input.foo.bar.baz == ContextPath(\"$$.Execution.Input.foo.bar.baz\") @pytest.mark.parametrize( \"path\",", "pytest.mark.functional] _VALID_STATIC_CONTEXT_PATHS = ( \"$$\", \"$$.Execution\", \"$$.Execution.Id\", \"$$.Execution.StartTime\", \"$$.State\", \"$$.State.EnteredTime\", \"$$.State.Name\", \"$$.State.RetryCount\", \"$$.StateMachine\",", "\"$$.State.RetryCount\", \"$$.StateMachine\", \"$$.StateMachine.Id\", \"$$.Task\", \"$$.Task.Token\", \"$$.Map\", \"$$.Map.Item\", \"$$.Map.Item.Index\", ) _VALID_CONTEXT_PATHS_WITH_INPUT = _VALID_STATIC_CONTEXT_PATHS +", "test == expected def test_contextpath_getattr_readable(): \"\"\"The real testing is via ``test_contextpath_getattr_valid``. 
This test", "\"\"\"Unit tests for ``rhodes.structures``.\"\"\" import pytest from rhodes.structures import ContextPath, Parameters pytestmark =", "invalid child\") for val in _VALID_STATIC_CONTEXT_PATHS), ) def test_contextpath_invalid(path): with pytest.raises(ValueError) as excinfo:", "for val in _VALID_STATIC_CONTEXT_PATHS), ) def test_contextpath_invalid(path): with pytest.raises(ValueError) as excinfo: ContextPath(path=path) excinfo.match(\"Invalid", "@pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT) def test_contextpath_getattr_valid(path): expected = ContextPath(path=path) names = path.split(\".\")[1:] test = ContextPath()", "def test_contextpath_invalid(path): with pytest.raises(ValueError) as excinfo: ContextPath(path=path) excinfo.match(\"Invalid Context Path\") def test_parameters_repr(): test", "rhodes.structures import ContextPath, Parameters pytestmark = [pytest.mark.local, pytest.mark.functional] _VALID_STATIC_CONTEXT_PATHS = ( \"$$\", \"$$.Execution\",", "expected = ContextPath(path=path) names = path.split(\".\")[1:] test = ContextPath() for name in names:", "id=\"valid prefix but invalid child\") for val in _VALID_STATIC_CONTEXT_PATHS), ) def test_contextpath_invalid(path): with", "pytest from rhodes.structures import ContextPath, Parameters pytestmark = [pytest.mark.local, pytest.mark.functional] _VALID_STATIC_CONTEXT_PATHS = (", "invalid root\")) + tuple(pytest.param(val + \".foo\", id=\"valid prefix but invalid child\") for val", "excinfo: ContextPath(path=path) excinfo.match(\"Invalid Context Path\") def test_parameters_repr(): test = Parameters(a=\"A\", b=3, c=True) assert", "= ContextPath(path=path) names = path.split(\".\")[1:] test = ContextPath() for name in names: test", "ContextPath(\"$$.Execution.Input.foo.bar.baz\") @pytest.mark.parametrize( \"path\", (pytest.param(\"\", id=\"empty path\"), pytest.param(\"$.Execution\", id=\"valid child but invalid root\")) +", "(pytest.param(\"\", id=\"empty path\"), pytest.param(\"$.Execution\", id=\"valid child but invalid root\")) + tuple(pytest.param(val + \".foo\",", "\"$$.Map.Item.Value.foo.bar.baz\", ) @pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT) def test_contextpath_valid(path): ContextPath(path=path) @pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT) def test_contextpath_getattr_valid(path): expected =", "= ( \"$$\", \"$$.Execution\", \"$$.Execution.Id\", \"$$.Execution.StartTime\", \"$$.State\", \"$$.State.EnteredTime\", \"$$.State.Name\", \"$$.State.RetryCount\", \"$$.StateMachine\", \"$$.StateMachine.Id\", \"$$.Task\",", "ContextPath(\"$$\") assert ContextPath().Execution == ContextPath(\"$$.Execution\") assert ContextPath().Map.Item.Index == ContextPath(\"$$.Map.Item.Index\") assert ContextPath().Execution.Input.foo.bar.baz == ContextPath(\"$$.Execution.Input.foo.bar.baz\")", "with pytest.raises(ValueError) as excinfo: ContextPath(path=path) excinfo.match(\"Invalid Context Path\") def test_parameters_repr(): test = Parameters(a=\"A\",", "This test is just to show a more human-readable form. \"\"\" assert ContextPath()", "\"$$.Map.Item.Value.foo\", \"$$.Map.Item.Value.foo.bar\", \"$$.Map.Item.Value.foo.bar.baz\", ) @pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT) def test_contextpath_valid(path): ContextPath(path=path) @pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT) def test_contextpath_getattr_valid(path):", "human-readable form. 
\"\"\" assert ContextPath() == ContextPath(\"$$\") assert ContextPath().Execution == ContextPath(\"$$.Execution\") assert ContextPath().Map.Item.Index", "names = path.split(\".\")[1:] test = ContextPath() for name in names: test = getattr(test,", "\"\"\" assert ContextPath() == ContextPath(\"$$\") assert ContextPath().Execution == ContextPath(\"$$.Execution\") assert ContextPath().Map.Item.Index == ContextPath(\"$$.Map.Item.Index\")", "( \"$$\", \"$$.Execution\", \"$$.Execution.Id\", \"$$.Execution.StartTime\", \"$$.State\", \"$$.State.EnteredTime\", \"$$.State.Name\", \"$$.State.RetryCount\", \"$$.StateMachine\", \"$$.StateMachine.Id\", \"$$.Task\", \"$$.Task.Token\",", "== ContextPath(\"$$.Execution\") assert ContextPath().Map.Item.Index == ContextPath(\"$$.Map.Item.Index\") assert ContextPath().Execution.Input.foo.bar.baz == ContextPath(\"$$.Execution.Input.foo.bar.baz\") @pytest.mark.parametrize( \"path\", (pytest.param(\"\",", "= _VALID_STATIC_CONTEXT_PATHS + ( \"$$.Execution.Input\", \"$$.Execution.Input.foo\", \"$$.Execution.Input.foo.bar\", \"$$.Execution.Input.foo.bar.baz\", \"$$.Map.Item.Value\", \"$$.Map.Item.Value.foo\", \"$$.Map.Item.Value.foo.bar\", \"$$.Map.Item.Value.foo.bar.baz\", )", "[pytest.mark.local, pytest.mark.functional] _VALID_STATIC_CONTEXT_PATHS = ( \"$$\", \"$$.Execution\", \"$$.Execution.Id\", \"$$.Execution.StartTime\", \"$$.State\", \"$$.State.EnteredTime\", \"$$.State.Name\", \"$$.State.RetryCount\",", "assert test == expected def test_contextpath_getattr_readable(): \"\"\"The real testing is via ``test_contextpath_getattr_valid``. This", "for ``rhodes.structures``.\"\"\" import pytest from rhodes.structures import ContextPath, Parameters pytestmark = [pytest.mark.local, pytest.mark.functional]", "in _VALID_STATIC_CONTEXT_PATHS), ) def test_contextpath_invalid(path): with pytest.raises(ValueError) as excinfo: ContextPath(path=path) excinfo.match(\"Invalid Context Path\")", "tuple(pytest.param(val + \".foo\", id=\"valid prefix but invalid child\") for val in _VALID_STATIC_CONTEXT_PATHS), )", "\"$$.StateMachine\", \"$$.StateMachine.Id\", \"$$.Task\", \"$$.Task.Token\", \"$$.Map\", \"$$.Map.Item\", \"$$.Map.Item.Index\", ) _VALID_CONTEXT_PATHS_WITH_INPUT = _VALID_STATIC_CONTEXT_PATHS + (", "@pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT) def test_contextpath_valid(path): ContextPath(path=path) @pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT) def test_contextpath_getattr_valid(path): expected = ContextPath(path=path) names", "ContextPath(path=path) @pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT) def test_contextpath_getattr_valid(path): expected = ContextPath(path=path) names = path.split(\".\")[1:] test =", "as excinfo: ContextPath(path=path) excinfo.match(\"Invalid Context Path\") def test_parameters_repr(): test = Parameters(a=\"A\", b=3, c=True)", "== expected def test_contextpath_getattr_readable(): \"\"\"The real testing is via ``test_contextpath_getattr_valid``. 
This test is", "from rhodes.structures import ContextPath, Parameters pytestmark = [pytest.mark.local, pytest.mark.functional] _VALID_STATIC_CONTEXT_PATHS = ( \"$$\",", "== ContextPath(\"$$\") assert ContextPath().Execution == ContextPath(\"$$.Execution\") assert ContextPath().Map.Item.Index == ContextPath(\"$$.Map.Item.Index\") assert ContextPath().Execution.Input.foo.bar.baz ==", "ContextPath(path=path) excinfo.match(\"Invalid Context Path\") def test_parameters_repr(): test = Parameters(a=\"A\", b=3, c=True) assert repr(test)", "prefix but invalid child\") for val in _VALID_STATIC_CONTEXT_PATHS), ) def test_contextpath_invalid(path): with pytest.raises(ValueError)", "test = getattr(test, name) assert test == expected def test_contextpath_getattr_readable(): \"\"\"The real testing", "tests for ``rhodes.structures``.\"\"\" import pytest from rhodes.structures import ContextPath, Parameters pytestmark = [pytest.mark.local,", "= getattr(test, name) assert test == expected def test_contextpath_getattr_readable(): \"\"\"The real testing is", "def test_contextpath_getattr_valid(path): expected = ContextPath(path=path) names = path.split(\".\")[1:] test = ContextPath() for name", "test is just to show a more human-readable form. \"\"\" assert ContextPath() ==", "but invalid root\")) + tuple(pytest.param(val + \".foo\", id=\"valid prefix but invalid child\") for", "\"$$.State.Name\", \"$$.State.RetryCount\", \"$$.StateMachine\", \"$$.StateMachine.Id\", \"$$.Task\", \"$$.Task.Token\", \"$$.Map\", \"$$.Map.Item\", \"$$.Map.Item.Index\", ) _VALID_CONTEXT_PATHS_WITH_INPUT = _VALID_STATIC_CONTEXT_PATHS", "test = ContextPath() for name in names: test = getattr(test, name) assert test", "test_contextpath_valid(path): ContextPath(path=path) @pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT) def test_contextpath_getattr_valid(path): expected = ContextPath(path=path) names = path.split(\".\")[1:] test", "assert ContextPath().Map.Item.Index == ContextPath(\"$$.Map.Item.Index\") assert ContextPath().Execution.Input.foo.bar.baz == ContextPath(\"$$.Execution.Input.foo.bar.baz\") @pytest.mark.parametrize( \"path\", (pytest.param(\"\", id=\"empty path\"),", "= ContextPath() for name in names: test = getattr(test, name) assert test ==", "( \"$$.Execution.Input\", \"$$.Execution.Input.foo\", \"$$.Execution.Input.foo.bar\", \"$$.Execution.Input.foo.bar.baz\", \"$$.Map.Item.Value\", \"$$.Map.Item.Value.foo\", \"$$.Map.Item.Value.foo.bar\", \"$$.Map.Item.Value.foo.bar.baz\", ) @pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT) def", "+ \".foo\", id=\"valid prefix but invalid child\") for val in _VALID_STATIC_CONTEXT_PATHS), ) def", "\"$$.Task\", \"$$.Task.Token\", \"$$.Map\", \"$$.Map.Item\", \"$$.Map.Item.Index\", ) _VALID_CONTEXT_PATHS_WITH_INPUT = _VALID_STATIC_CONTEXT_PATHS + ( \"$$.Execution.Input\", \"$$.Execution.Input.foo\",", "ContextPath(path=path) names = path.split(\".\")[1:] test = ContextPath() for name in names: test =", "\"$$.State.EnteredTime\", \"$$.State.Name\", \"$$.State.RetryCount\", \"$$.StateMachine\", \"$$.StateMachine.Id\", \"$$.Task\", \"$$.Task.Token\", \"$$.Map\", \"$$.Map.Item\", \"$$.Map.Item.Index\", ) _VALID_CONTEXT_PATHS_WITH_INPUT =", "@pytest.mark.parametrize( \"path\", (pytest.param(\"\", id=\"empty path\"), pytest.param(\"$.Execution\", id=\"valid child but invalid root\")) + tuple(pytest.param(val", "id=\"valid child but invalid root\")) + tuple(pytest.param(val + \".foo\", id=\"valid prefix but invalid", "\"$$.StateMachine.Id\", \"$$.Task\", 
\"$$.Task.Token\", \"$$.Map\", \"$$.Map.Item\", \"$$.Map.Item.Index\", ) _VALID_CONTEXT_PATHS_WITH_INPUT = _VALID_STATIC_CONTEXT_PATHS + ( \"$$.Execution.Input\",", "\"$$.State\", \"$$.State.EnteredTime\", \"$$.State.Name\", \"$$.State.RetryCount\", \"$$.StateMachine\", \"$$.StateMachine.Id\", \"$$.Task\", \"$$.Task.Token\", \"$$.Map\", \"$$.Map.Item\", \"$$.Map.Item.Index\", ) _VALID_CONTEXT_PATHS_WITH_INPUT", "``rhodes.structures``.\"\"\" import pytest from rhodes.structures import ContextPath, Parameters pytestmark = [pytest.mark.local, pytest.mark.functional] _VALID_STATIC_CONTEXT_PATHS", "\"$$.Map.Item\", \"$$.Map.Item.Index\", ) _VALID_CONTEXT_PATHS_WITH_INPUT = _VALID_STATIC_CONTEXT_PATHS + ( \"$$.Execution.Input\", \"$$.Execution.Input.foo\", \"$$.Execution.Input.foo.bar\", \"$$.Execution.Input.foo.bar.baz\", \"$$.Map.Item.Value\",", "path.split(\".\")[1:] test = ContextPath() for name in names: test = getattr(test, name) assert", "pytestmark = [pytest.mark.local, pytest.mark.functional] _VALID_STATIC_CONTEXT_PATHS = ( \"$$\", \"$$.Execution\", \"$$.Execution.Id\", \"$$.Execution.StartTime\", \"$$.State\", \"$$.State.EnteredTime\",", "\"$$.Execution.Id\", \"$$.Execution.StartTime\", \"$$.State\", \"$$.State.EnteredTime\", \"$$.State.Name\", \"$$.State.RetryCount\", \"$$.StateMachine\", \"$$.StateMachine.Id\", \"$$.Task\", \"$$.Task.Token\", \"$$.Map\", \"$$.Map.Item\", \"$$.Map.Item.Index\",", "expected def test_contextpath_getattr_readable(): \"\"\"The real testing is via ``test_contextpath_getattr_valid``. This test is just", "def test_contextpath_valid(path): ContextPath(path=path) @pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT) def test_contextpath_getattr_valid(path): expected = ContextPath(path=path) names = path.split(\".\")[1:]", "a more human-readable form. \"\"\" assert ContextPath() == ContextPath(\"$$\") assert ContextPath().Execution == ContextPath(\"$$.Execution\")", "== ContextPath(\"$$.Map.Item.Index\") assert ContextPath().Execution.Input.foo.bar.baz == ContextPath(\"$$.Execution.Input.foo.bar.baz\") @pytest.mark.parametrize( \"path\", (pytest.param(\"\", id=\"empty path\"), pytest.param(\"$.Execution\", id=\"valid", "_VALID_CONTEXT_PATHS_WITH_INPUT = _VALID_STATIC_CONTEXT_PATHS + ( \"$$.Execution.Input\", \"$$.Execution.Input.foo\", \"$$.Execution.Input.foo.bar\", \"$$.Execution.Input.foo.bar.baz\", \"$$.Map.Item.Value\", \"$$.Map.Item.Value.foo\", \"$$.Map.Item.Value.foo.bar\", \"$$.Map.Item.Value.foo.bar.baz\",", "\"$$.Execution.Input\", \"$$.Execution.Input.foo\", \"$$.Execution.Input.foo.bar\", \"$$.Execution.Input.foo.bar.baz\", \"$$.Map.Item.Value\", \"$$.Map.Item.Value.foo\", \"$$.Map.Item.Value.foo.bar\", \"$$.Map.Item.Value.foo.bar.baz\", ) @pytest.mark.parametrize(\"path\", _VALID_CONTEXT_PATHS_WITH_INPUT) def test_contextpath_valid(path):", "excinfo.match(\"Invalid Context Path\") def test_parameters_repr(): test = Parameters(a=\"A\", b=3, c=True) assert repr(test) ==", "to show a more human-readable form. 
\"\"\" assert ContextPath() == ContextPath(\"$$\") assert ContextPath().Execution", "ContextPath() for name in names: test = getattr(test, name) assert test == expected", "\".foo\", id=\"valid prefix but invalid child\") for val in _VALID_STATIC_CONTEXT_PATHS), ) def test_contextpath_invalid(path):", "_VALID_STATIC_CONTEXT_PATHS), ) def test_contextpath_invalid(path): with pytest.raises(ValueError) as excinfo: ContextPath(path=path) excinfo.match(\"Invalid Context Path\") def", "path\"), pytest.param(\"$.Execution\", id=\"valid child but invalid root\")) + tuple(pytest.param(val + \".foo\", id=\"valid prefix", "= [pytest.mark.local, pytest.mark.functional] _VALID_STATIC_CONTEXT_PATHS = ( \"$$\", \"$$.Execution\", \"$$.Execution.Id\", \"$$.Execution.StartTime\", \"$$.State\", \"$$.State.EnteredTime\", \"$$.State.Name\",", "pytest.raises(ValueError) as excinfo: ContextPath(path=path) excinfo.match(\"Invalid Context Path\") def test_parameters_repr(): test = Parameters(a=\"A\", b=3,", ") _VALID_CONTEXT_PATHS_WITH_INPUT = _VALID_STATIC_CONTEXT_PATHS + ( \"$$.Execution.Input\", \"$$.Execution.Input.foo\", \"$$.Execution.Input.foo.bar\", \"$$.Execution.Input.foo.bar.baz\", \"$$.Map.Item.Value\", \"$$.Map.Item.Value.foo\", \"$$.Map.Item.Value.foo.bar\",", "for name in names: test = getattr(test, name) assert test == expected def", "root\")) + tuple(pytest.param(val + \".foo\", id=\"valid prefix but invalid child\") for val in", "but invalid child\") for val in _VALID_STATIC_CONTEXT_PATHS), ) def test_contextpath_invalid(path): with pytest.raises(ValueError) as", "\"\"\"The real testing is via ``test_contextpath_getattr_valid``. This test is just to show a", "ContextPath, Parameters pytestmark = [pytest.mark.local, pytest.mark.functional] _VALID_STATIC_CONTEXT_PATHS = ( \"$$\", \"$$.Execution\", \"$$.Execution.Id\", \"$$.Execution.StartTime\",", "test_contextpath_getattr_readable(): \"\"\"The real testing is via ``test_contextpath_getattr_valid``. This test is just to show" ]
[ "abc import abstractmethod class UserParser: @abstractmethod def __init__(self, user): \"\"\" \"\"\" @abstractmethod def", ":param user: :return: { username: 用户名 number: 学号或工号 avatar: 头像 } \"\"\" class", "@abstractmethod def get_user_info(self): \"\"\" :param user: :return: { username: 用户名 number: 学号或工号 avatar:", "@abstractmethod def __init__(self, user): \"\"\" \"\"\" @abstractmethod def get_user_info(self): \"\"\" :param user: :return:", "} \"\"\" class DefaultUserParser(UserParser): def __init__(self, user): self.user = user def get_user_info(self): user", "user: :return: { username: 用户名 number: 学号或工号 avatar: 头像 } \"\"\" class DefaultUserParser(UserParser):", "\"\"\" \"\"\" @abstractmethod def get_user_info(self): \"\"\" :param user: :return: { username: 用户名 number:", "import abstractmethod class UserParser: @abstractmethod def __init__(self, user): \"\"\" \"\"\" @abstractmethod def get_user_info(self):", "class DefaultUserParser(UserParser): def __init__(self, user): self.user = user def get_user_info(self): user = self.user", "def __init__(self, user): self.user = user def get_user_info(self): user = self.user return user['username'],", "def __init__(self, user): \"\"\" \"\"\" @abstractmethod def get_user_info(self): \"\"\" :param user: :return: {", "\"\"\" class DefaultUserParser(UserParser): def __init__(self, user): self.user = user def get_user_info(self): user =", "头像 } \"\"\" class DefaultUserParser(UserParser): def __init__(self, user): self.user = user def get_user_info(self):", "class UserParser: @abstractmethod def __init__(self, user): \"\"\" \"\"\" @abstractmethod def get_user_info(self): \"\"\" :param", "DefaultUserParser(UserParser): def __init__(self, user): self.user = user def get_user_info(self): user = self.user return", "\"\"\" @abstractmethod def get_user_info(self): \"\"\" :param user: :return: { username: 用户名 number: 学号或工号", "{ username: 用户名 number: 学号或工号 avatar: 头像 } \"\"\" class DefaultUserParser(UserParser): def __init__(self,", "user): \"\"\" \"\"\" @abstractmethod def get_user_info(self): \"\"\" :param user: :return: { username: 用户名", "from abc import abstractmethod class UserParser: @abstractmethod def __init__(self, user): \"\"\" \"\"\" @abstractmethod", "abstractmethod class UserParser: @abstractmethod def __init__(self, user): \"\"\" \"\"\" @abstractmethod def get_user_info(self): \"\"\"", "UserParser: @abstractmethod def __init__(self, user): \"\"\" \"\"\" @abstractmethod def get_user_info(self): \"\"\" :param user:", "用户名 number: 学号或工号 avatar: 头像 } \"\"\" class DefaultUserParser(UserParser): def __init__(self, user): self.user", "number: 学号或工号 avatar: 头像 } \"\"\" class DefaultUserParser(UserParser): def __init__(self, user): self.user =", "学号或工号 avatar: 头像 } \"\"\" class DefaultUserParser(UserParser): def __init__(self, user): self.user = user", "__init__(self, user): \"\"\" \"\"\" @abstractmethod def get_user_info(self): \"\"\" :param user: :return: { username:", "avatar: 头像 } \"\"\" class DefaultUserParser(UserParser): def __init__(self, user): self.user = user def", "\"\"\" :param user: :return: { username: 用户名 number: 学号或工号 avatar: 头像 } \"\"\"", ":return: { username: 用户名 number: 学号或工号 avatar: 头像 } \"\"\" class DefaultUserParser(UserParser): def", "user): self.user = user def get_user_info(self): user = self.user return user['username'], user['number'], user['avatar']", "def get_user_info(self): \"\"\" :param user: :return: { username: 用户名 number: 学号或工号 avatar: 头像", "username: 用户名 number: 学号或工号 avatar: 头像 } \"\"\" class 
DefaultUserParser(UserParser): def __init__(self, user):", "__init__(self, user): self.user = user def get_user_info(self): user = self.user return user['username'], user['number'],", "get_user_info(self): \"\"\" :param user: :return: { username: 用户名 number: 学号或工号 avatar: 头像 }" ]
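Likewise, the two-class snippet covered by the preceding list can be laid out as a module. Indentation is inferred, and the Chinese docstring fields are translated (用户名 as user name, 学号或工号 as student or staff number, 头像 as avatar); everything else follows the fragments.

from abc import abstractmethod


class UserParser:
    @abstractmethod
    def __init__(self, user):
        """ """

    @abstractmethod
    def get_user_info(self):
        """
        :param user:
        :return: {
            username: user name
            number: student or staff number
            avatar: avatar
        }
        """


class DefaultUserParser(UserParser):
    def __init__(self, user):
        self.user = user

    def get_user_info(self):
        user = self.user
        return user['username'], user['number'], user['avatar']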
[ "('expression -> H6 factor','expression',2,'p_exp_cr','md2html.py',79), ('factor -> WORD','factor',1,'p_factor_text','md2html.py',92), ('factor -> WORD SPACE factor','factor',3,'p_factor_text','md2html.py',93), ]", "del _lr_goto_items _lr_productions = [ (\"S' -> body\",\"S'\",1,None,None,None), ('body -> statement','body',1,'p_body','md2html.py',62), ('statement ->", "= [ (\"S' -> body\",\"S'\",1,None,None,None), ('body -> statement','body',1,'p_body','md2html.py',62), ('statement -> expression','statement',1,'p_state','md2html.py',66), ('statement ->", "_lr_goto = { } for _k, _v in _lr_goto_items.items(): for _x,_y in zip(_v[0],_v[1]):", "('statement -> expression','statement',1,'p_state','md2html.py',66), ('statement -> statement CR expression','statement',3,'p_state','md2html.py',67), ('expression -> H1 factor','expression',2,'p_exp_cr','md2html.py',74), ('expression", "_lr_action_items = {'WORD':([2,3,4,5,6,7,18,],[10,10,10,10,10,10,10,]),'SPACE':([10,],[18,]),'H2':([0,17,],[2,2,]),'H3':([0,17,],[3,3,]),'H1':([0,17,],[4,4,]),'H6':([0,17,],[5,5,]),'H4':([0,17,],[6,6,]),'H5':([0,17,],[7,7,]),'CR':([8,9,10,11,12,13,14,15,16,19,20,],[17,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),'$end':([1,8,9,10,11,12,13,14,15,16,19,20,],[0,-1,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),} _lr_action = { } for _k, _v in _lr_action_items.items(): for", "= {'body':([0,],[1,]),'expression':([0,17,],[9,19,]),'statement':([0,],[8,]),'factor':([2,3,4,5,6,7,18,],[11,12,13,14,15,16,20,]),} _lr_goto = { } for _k, _v in _lr_goto_items.items(): for _x,_y", "_lr_goto[_x] = { } _lr_goto[_x][_k] = _y del _lr_goto_items _lr_productions = [ (\"S'", "_lr_productions = [ (\"S' -> body\",\"S'\",1,None,None,None), ('body -> statement','body',1,'p_body','md2html.py',62), ('statement -> expression','statement',1,'p_state','md2html.py',66), ('statement", "_k, _v in _lr_goto_items.items(): for _x,_y in zip(_v[0],_v[1]): if not _x in _lr_goto:", "# parsetab.py # This file is automatically generated. Do not edit. 
_tabversion =", "expression','statement',3,'p_state','md2html.py',67), ('expression -> H1 factor','expression',2,'p_exp_cr','md2html.py',74), ('expression -> H2 factor','expression',2,'p_exp_cr','md2html.py',75), ('expression -> H3 factor','expression',2,'p_exp_cr','md2html.py',76),", "{ } _lr_action[_x][_k] = _y del _lr_action_items _lr_goto_items = {'body':([0,],[1,]),'expression':([0,17,],[9,19,]),'statement':([0,],[8,]),'factor':([2,3,4,5,6,7,18,],[11,12,13,14,15,16,20,]),} _lr_goto = {", "body\",\"S'\",1,None,None,None), ('body -> statement','body',1,'p_body','md2html.py',62), ('statement -> expression','statement',1,'p_state','md2html.py',66), ('statement -> statement CR expression','statement',3,'p_state','md2html.py',67), ('expression", "= '3.2' _lr_method = 'LALR' _lr_signature = 'q\\xf5X\\xfc\\x8b\\xfa\\xfdP\\xca\\xd7\\xc4c\\xe9Bv\\x05' _lr_action_items = {'WORD':([2,3,4,5,6,7,18,],[10,10,10,10,10,10,10,]),'SPACE':([10,],[18,]),'H2':([0,17,],[2,2,]),'H3':([0,17,],[3,3,]),'H1':([0,17,],[4,4,]),'H6':([0,17,],[5,5,]),'H4':([0,17,],[6,6,]),'H5':([0,17,],[7,7,]),'CR':([8,9,10,11,12,13,14,15,16,19,20,],[17,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),'$end':([1,8,9,10,11,12,13,14,15,16,19,20,],[0,-1,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),} _lr_action =", "} for _k, _v in _lr_action_items.items(): for _x,_y in zip(_v[0],_v[1]): if not _x", "statement','body',1,'p_body','md2html.py',62), ('statement -> expression','statement',1,'p_state','md2html.py',66), ('statement -> statement CR expression','statement',3,'p_state','md2html.py',67), ('expression -> H1 factor','expression',2,'p_exp_cr','md2html.py',74),", "= {'WORD':([2,3,4,5,6,7,18,],[10,10,10,10,10,10,10,]),'SPACE':([10,],[18,]),'H2':([0,17,],[2,2,]),'H3':([0,17,],[3,3,]),'H1':([0,17,],[4,4,]),'H6':([0,17,],[5,5,]),'H4':([0,17,],[6,6,]),'H5':([0,17,],[7,7,]),'CR':([8,9,10,11,12,13,14,15,16,19,20,],[17,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),'$end':([1,8,9,10,11,12,13,14,15,16,19,20,],[0,-1,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),} _lr_action = { } for _k, _v in _lr_action_items.items(): for _x,_y", "H5 factor','expression',2,'p_exp_cr','md2html.py',78), ('expression -> H6 factor','expression',2,'p_exp_cr','md2html.py',79), ('factor -> WORD','factor',1,'p_factor_text','md2html.py',92), ('factor -> WORD SPACE", "factor','expression',2,'p_exp_cr','md2html.py',74), ('expression -> H2 factor','expression',2,'p_exp_cr','md2html.py',75), ('expression -> H3 factor','expression',2,'p_exp_cr','md2html.py',76), ('expression -> H4 factor','expression',2,'p_exp_cr','md2html.py',77),", "_x,_y in zip(_v[0],_v[1]): if not _x in _lr_action: _lr_action[_x] = { } _lr_action[_x][_k]", "_lr_goto_items _lr_productions = [ (\"S' -> body\",\"S'\",1,None,None,None), ('body -> statement','body',1,'p_body','md2html.py',62), ('statement -> expression','statement',1,'p_state','md2html.py',66),", "= { } _lr_action[_x][_k] = _y del _lr_action_items _lr_goto_items = {'body':([0,],[1,]),'expression':([0,17,],[9,19,]),'statement':([0,],[8,]),'factor':([2,3,4,5,6,7,18,],[11,12,13,14,15,16,20,]),} _lr_goto =", "= 'q\\xf5X\\xfc\\x8b\\xfa\\xfdP\\xca\\xd7\\xc4c\\xe9Bv\\x05' _lr_action_items = {'WORD':([2,3,4,5,6,7,18,],[10,10,10,10,10,10,10,]),'SPACE':([10,],[18,]),'H2':([0,17,],[2,2,]),'H3':([0,17,],[3,3,]),'H1':([0,17,],[4,4,]),'H6':([0,17,],[5,5,]),'H4':([0,17,],[6,6,]),'H5':([0,17,],[7,7,]),'CR':([8,9,10,11,12,13,14,15,16,19,20,],[17,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),'$end':([1,8,9,10,11,12,13,14,15,16,19,20,],[0,-1,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),} _lr_action = { } for _k, _v in", "in 
zip(_v[0],_v[1]): if not _x in _lr_goto: _lr_goto[_x] = { } _lr_goto[_x][_k] =", "zip(_v[0],_v[1]): if not _x in _lr_action: _lr_action[_x] = { } _lr_action[_x][_k] = _y", "_lr_goto: _lr_goto[_x] = { } _lr_goto[_x][_k] = _y del _lr_goto_items _lr_productions = [", "# This file is automatically generated. Do not edit. _tabversion = '3.2' _lr_method", "in _lr_goto_items.items(): for _x,_y in zip(_v[0],_v[1]): if not _x in _lr_goto: _lr_goto[_x] =", "not _x in _lr_action: _lr_action[_x] = { } _lr_action[_x][_k] = _y del _lr_action_items", "-> H3 factor','expression',2,'p_exp_cr','md2html.py',76), ('expression -> H4 factor','expression',2,'p_exp_cr','md2html.py',77), ('expression -> H5 factor','expression',2,'p_exp_cr','md2html.py',78), ('expression ->", "automatically generated. Do not edit. _tabversion = '3.2' _lr_method = 'LALR' _lr_signature =", "generated. Do not edit. _tabversion = '3.2' _lr_method = 'LALR' _lr_signature = 'q\\xf5X\\xfc\\x8b\\xfa\\xfdP\\xca\\xd7\\xc4c\\xe9Bv\\x05'", "('expression -> H5 factor','expression',2,'p_exp_cr','md2html.py',78), ('expression -> H6 factor','expression',2,'p_exp_cr','md2html.py',79), ('factor -> WORD','factor',1,'p_factor_text','md2html.py',92), ('factor ->", "in _lr_action: _lr_action[_x] = { } _lr_action[_x][_k] = _y del _lr_action_items _lr_goto_items =", "_v in _lr_goto_items.items(): for _x,_y in zip(_v[0],_v[1]): if not _x in _lr_goto: _lr_goto[_x]", "('expression -> H3 factor','expression',2,'p_exp_cr','md2html.py',76), ('expression -> H4 factor','expression',2,'p_exp_cr','md2html.py',77), ('expression -> H5 factor','expression',2,'p_exp_cr','md2html.py',78), ('expression", "} _lr_action[_x][_k] = _y del _lr_action_items _lr_goto_items = {'body':([0,],[1,]),'expression':([0,17,],[9,19,]),'statement':([0,],[8,]),'factor':([2,3,4,5,6,7,18,],[11,12,13,14,15,16,20,]),} _lr_goto = { }", "('expression -> H2 factor','expression',2,'p_exp_cr','md2html.py',75), ('expression -> H3 factor','expression',2,'p_exp_cr','md2html.py',76), ('expression -> H4 factor','expression',2,'p_exp_cr','md2html.py',77), ('expression", "-> body\",\"S'\",1,None,None,None), ('body -> statement','body',1,'p_body','md2html.py',62), ('statement -> expression','statement',1,'p_state','md2html.py',66), ('statement -> statement CR expression','statement',3,'p_state','md2html.py',67),", "This file is automatically generated. Do not edit. 
_tabversion = '3.2' _lr_method =", "_x in _lr_action: _lr_action[_x] = { } _lr_action[_x][_k] = _y del _lr_action_items _lr_goto_items", "if not _x in _lr_action: _lr_action[_x] = { } _lr_action[_x][_k] = _y del", "-> H5 factor','expression',2,'p_exp_cr','md2html.py',78), ('expression -> H6 factor','expression',2,'p_exp_cr','md2html.py',79), ('factor -> WORD','factor',1,'p_factor_text','md2html.py',92), ('factor -> WORD", "zip(_v[0],_v[1]): if not _x in _lr_goto: _lr_goto[_x] = { } _lr_goto[_x][_k] = _y", "= 'LALR' _lr_signature = 'q\\xf5X\\xfc\\x8b\\xfa\\xfdP\\xca\\xd7\\xc4c\\xe9Bv\\x05' _lr_action_items = {'WORD':([2,3,4,5,6,7,18,],[10,10,10,10,10,10,10,]),'SPACE':([10,],[18,]),'H2':([0,17,],[2,2,]),'H3':([0,17,],[3,3,]),'H1':([0,17,],[4,4,]),'H6':([0,17,],[5,5,]),'H4':([0,17,],[6,6,]),'H5':([0,17,],[7,7,]),'CR':([8,9,10,11,12,13,14,15,16,19,20,],[17,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),'$end':([1,8,9,10,11,12,13,14,15,16,19,20,],[0,-1,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),} _lr_action = { } for", "expression','statement',1,'p_state','md2html.py',66), ('statement -> statement CR expression','statement',3,'p_state','md2html.py',67), ('expression -> H1 factor','expression',2,'p_exp_cr','md2html.py',74), ('expression -> H2", "-> statement CR expression','statement',3,'p_state','md2html.py',67), ('expression -> H1 factor','expression',2,'p_exp_cr','md2html.py',74), ('expression -> H2 factor','expression',2,'p_exp_cr','md2html.py',75), ('expression", "_y del _lr_goto_items _lr_productions = [ (\"S' -> body\",\"S'\",1,None,None,None), ('body -> statement','body',1,'p_body','md2html.py',62), ('statement", "('expression -> H1 factor','expression',2,'p_exp_cr','md2html.py',74), ('expression -> H2 factor','expression',2,'p_exp_cr','md2html.py',75), ('expression -> H3 factor','expression',2,'p_exp_cr','md2html.py',76), ('expression", "_lr_action[_x] = { } _lr_action[_x][_k] = _y del _lr_action_items _lr_goto_items = {'body':([0,],[1,]),'expression':([0,17,],[9,19,]),'statement':([0,],[8,]),'factor':([2,3,4,5,6,7,18,],[11,12,13,14,15,16,20,]),} _lr_goto", "{'body':([0,],[1,]),'expression':([0,17,],[9,19,]),'statement':([0,],[8,]),'factor':([2,3,4,5,6,7,18,],[11,12,13,14,15,16,20,]),} _lr_goto = { } for _k, _v in _lr_goto_items.items(): for _x,_y in", "factor','expression',2,'p_exp_cr','md2html.py',75), ('expression -> H3 factor','expression',2,'p_exp_cr','md2html.py',76), ('expression -> H4 factor','expression',2,'p_exp_cr','md2html.py',77), ('expression -> H5 factor','expression',2,'p_exp_cr','md2html.py',78),", "not _x in _lr_goto: _lr_goto[_x] = { } _lr_goto[_x][_k] = _y del _lr_goto_items", "= _y del _lr_goto_items _lr_productions = [ (\"S' -> body\",\"S'\",1,None,None,None), ('body -> statement','body',1,'p_body','md2html.py',62),", "file is automatically generated. Do not edit. 
_tabversion = '3.2' _lr_method = 'LALR'", "_lr_goto_items = {'body':([0,],[1,]),'expression':([0,17,],[9,19,]),'statement':([0,],[8,]),'factor':([2,3,4,5,6,7,18,],[11,12,13,14,15,16,20,]),} _lr_goto = { } for _k, _v in _lr_goto_items.items(): for", "'q\\xf5X\\xfc\\x8b\\xfa\\xfdP\\xca\\xd7\\xc4c\\xe9Bv\\x05' _lr_action_items = {'WORD':([2,3,4,5,6,7,18,],[10,10,10,10,10,10,10,]),'SPACE':([10,],[18,]),'H2':([0,17,],[2,2,]),'H3':([0,17,],[3,3,]),'H1':([0,17,],[4,4,]),'H6':([0,17,],[5,5,]),'H4':([0,17,],[6,6,]),'H5':([0,17,],[7,7,]),'CR':([8,9,10,11,12,13,14,15,16,19,20,],[17,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),'$end':([1,8,9,10,11,12,13,14,15,16,19,20,],[0,-1,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),} _lr_action = { } for _k, _v in _lr_action_items.items():", "_x in _lr_goto: _lr_goto[_x] = { } _lr_goto[_x][_k] = _y del _lr_goto_items _lr_productions", "(\"S' -> body\",\"S'\",1,None,None,None), ('body -> statement','body',1,'p_body','md2html.py',62), ('statement -> expression','statement',1,'p_state','md2html.py',66), ('statement -> statement CR", "_lr_action_items _lr_goto_items = {'body':([0,],[1,]),'expression':([0,17,],[9,19,]),'statement':([0,],[8,]),'factor':([2,3,4,5,6,7,18,],[11,12,13,14,15,16,20,]),} _lr_goto = { } for _k, _v in _lr_goto_items.items():", "H2 factor','expression',2,'p_exp_cr','md2html.py',75), ('expression -> H3 factor','expression',2,'p_exp_cr','md2html.py',76), ('expression -> H4 factor','expression',2,'p_exp_cr','md2html.py',77), ('expression -> H5", "CR expression','statement',3,'p_state','md2html.py',67), ('expression -> H1 factor','expression',2,'p_exp_cr','md2html.py',74), ('expression -> H2 factor','expression',2,'p_exp_cr','md2html.py',75), ('expression -> H3", "del _lr_action_items _lr_goto_items = {'body':([0,],[1,]),'expression':([0,17,],[9,19,]),'statement':([0,],[8,]),'factor':([2,3,4,5,6,7,18,],[11,12,13,14,15,16,20,]),} _lr_goto = { } for _k, _v in", "-> H1 factor','expression',2,'p_exp_cr','md2html.py',74), ('expression -> H2 factor','expression',2,'p_exp_cr','md2html.py',75), ('expression -> H3 factor','expression',2,'p_exp_cr','md2html.py',76), ('expression ->", "for _x,_y in zip(_v[0],_v[1]): if not _x in _lr_goto: _lr_goto[_x] = { }", "for _k, _v in _lr_action_items.items(): for _x,_y in zip(_v[0],_v[1]): if not _x in", "_lr_action: _lr_action[_x] = { } _lr_action[_x][_k] = _y del _lr_action_items _lr_goto_items = {'body':([0,],[1,]),'expression':([0,17,],[9,19,]),'statement':([0,],[8,]),'factor':([2,3,4,5,6,7,18,],[11,12,13,14,15,16,20,]),}", "= _y del _lr_action_items _lr_goto_items = {'body':([0,],[1,]),'expression':([0,17,],[9,19,]),'statement':([0,],[8,]),'factor':([2,3,4,5,6,7,18,],[11,12,13,14,15,16,20,]),} _lr_goto = { } for _k,", "factor','expression',2,'p_exp_cr','md2html.py',76), ('expression -> H4 factor','expression',2,'p_exp_cr','md2html.py',77), ('expression -> H5 factor','expression',2,'p_exp_cr','md2html.py',78), ('expression -> H6 factor','expression',2,'p_exp_cr','md2html.py',79),", "_k, _v in _lr_action_items.items(): for _x,_y in zip(_v[0],_v[1]): if not _x in _lr_action:", "-> expression','statement',1,'p_state','md2html.py',66), ('statement -> statement CR expression','statement',3,'p_state','md2html.py',67), ('expression -> H1 factor','expression',2,'p_exp_cr','md2html.py',74), ('expression ->", "_lr_goto_items.items(): for _x,_y in zip(_v[0],_v[1]): if not _x in _lr_goto: _lr_goto[_x] = {", 
"{'WORD':([2,3,4,5,6,7,18,],[10,10,10,10,10,10,10,]),'SPACE':([10,],[18,]),'H2':([0,17,],[2,2,]),'H3':([0,17,],[3,3,]),'H1':([0,17,],[4,4,]),'H6':([0,17,],[5,5,]),'H4':([0,17,],[6,6,]),'H5':([0,17,],[7,7,]),'CR':([8,9,10,11,12,13,14,15,16,19,20,],[17,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),'$end':([1,8,9,10,11,12,13,14,15,16,19,20,],[0,-1,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),} _lr_action = { } for _k, _v in _lr_action_items.items(): for _x,_y in", "[ (\"S' -> body\",\"S'\",1,None,None,None), ('body -> statement','body',1,'p_body','md2html.py',62), ('statement -> expression','statement',1,'p_state','md2html.py',66), ('statement -> statement", "-> H4 factor','expression',2,'p_exp_cr','md2html.py',77), ('expression -> H5 factor','expression',2,'p_exp_cr','md2html.py',78), ('expression -> H6 factor','expression',2,'p_exp_cr','md2html.py',79), ('factor ->", "'LALR' _lr_signature = 'q\\xf5X\\xfc\\x8b\\xfa\\xfdP\\xca\\xd7\\xc4c\\xe9Bv\\x05' _lr_action_items = {'WORD':([2,3,4,5,6,7,18,],[10,10,10,10,10,10,10,]),'SPACE':([10,],[18,]),'H2':([0,17,],[2,2,]),'H3':([0,17,],[3,3,]),'H1':([0,17,],[4,4,]),'H6':([0,17,],[5,5,]),'H4':([0,17,],[6,6,]),'H5':([0,17,],[7,7,]),'CR':([8,9,10,11,12,13,14,15,16,19,20,],[17,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),'$end':([1,8,9,10,11,12,13,14,15,16,19,20,],[0,-1,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),} _lr_action = { } for _k,", "is automatically generated. Do not edit. _tabversion = '3.2' _lr_method = 'LALR' _lr_signature", "statement CR expression','statement',3,'p_state','md2html.py',67), ('expression -> H1 factor','expression',2,'p_exp_cr','md2html.py',74), ('expression -> H2 factor','expression',2,'p_exp_cr','md2html.py',75), ('expression ->", "'3.2' _lr_method = 'LALR' _lr_signature = 'q\\xf5X\\xfc\\x8b\\xfa\\xfdP\\xca\\xd7\\xc4c\\xe9Bv\\x05' _lr_action_items = {'WORD':([2,3,4,5,6,7,18,],[10,10,10,10,10,10,10,]),'SPACE':([10,],[18,]),'H2':([0,17,],[2,2,]),'H3':([0,17,],[3,3,]),'H1':([0,17,],[4,4,]),'H6':([0,17,],[5,5,]),'H4':([0,17,],[6,6,]),'H5':([0,17,],[7,7,]),'CR':([8,9,10,11,12,13,14,15,16,19,20,],[17,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),'$end':([1,8,9,10,11,12,13,14,15,16,19,20,],[0,-1,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),} _lr_action = {", "_lr_action_items.items(): for _x,_y in zip(_v[0],_v[1]): if not _x in _lr_action: _lr_action[_x] = {", "_lr_action[_x][_k] = _y del _lr_action_items _lr_goto_items = {'body':([0,],[1,]),'expression':([0,17,],[9,19,]),'statement':([0,],[8,]),'factor':([2,3,4,5,6,7,18,],[11,12,13,14,15,16,20,]),} _lr_goto = { } for", "-> H2 factor','expression',2,'p_exp_cr','md2html.py',75), ('expression -> H3 factor','expression',2,'p_exp_cr','md2html.py',76), ('expression -> H4 factor','expression',2,'p_exp_cr','md2html.py',77), ('expression ->", "('statement -> statement CR expression','statement',3,'p_state','md2html.py',67), ('expression -> H1 factor','expression',2,'p_exp_cr','md2html.py',74), ('expression -> H2 factor','expression',2,'p_exp_cr','md2html.py',75),", "for _x,_y in zip(_v[0],_v[1]): if not _x in _lr_action: _lr_action[_x] = { }", "parsetab.py # This file is automatically generated. Do not edit. 
_tabversion = '3.2'", "for _k, _v in _lr_goto_items.items(): for _x,_y in zip(_v[0],_v[1]): if not _x in", "} for _k, _v in _lr_goto_items.items(): for _x,_y in zip(_v[0],_v[1]): if not _x", "in _lr_goto: _lr_goto[_x] = { } _lr_goto[_x][_k] = _y del _lr_goto_items _lr_productions =", "{ } _lr_goto[_x][_k] = _y del _lr_goto_items _lr_productions = [ (\"S' -> body\",\"S'\",1,None,None,None),", "_y del _lr_action_items _lr_goto_items = {'body':([0,],[1,]),'expression':([0,17,],[9,19,]),'statement':([0,],[8,]),'factor':([2,3,4,5,6,7,18,],[11,12,13,14,15,16,20,]),} _lr_goto = { } for _k, _v", "H3 factor','expression',2,'p_exp_cr','md2html.py',76), ('expression -> H4 factor','expression',2,'p_exp_cr','md2html.py',77), ('expression -> H5 factor','expression',2,'p_exp_cr','md2html.py',78), ('expression -> H6", "_lr_goto[_x][_k] = _y del _lr_goto_items _lr_productions = [ (\"S' -> body\",\"S'\",1,None,None,None), ('body ->", "-> statement','body',1,'p_body','md2html.py',62), ('statement -> expression','statement',1,'p_state','md2html.py',66), ('statement -> statement CR expression','statement',3,'p_state','md2html.py',67), ('expression -> H1", "factor','expression',2,'p_exp_cr','md2html.py',77), ('expression -> H5 factor','expression',2,'p_exp_cr','md2html.py',78), ('expression -> H6 factor','expression',2,'p_exp_cr','md2html.py',79), ('factor -> WORD','factor',1,'p_factor_text','md2html.py',92), ('factor", "} _lr_goto[_x][_k] = _y del _lr_goto_items _lr_productions = [ (\"S' -> body\",\"S'\",1,None,None,None), ('body", "if not _x in _lr_goto: _lr_goto[_x] = { } _lr_goto[_x][_k] = _y del", "factor','expression',2,'p_exp_cr','md2html.py',78), ('expression -> H6 factor','expression',2,'p_exp_cr','md2html.py',79), ('factor -> WORD','factor',1,'p_factor_text','md2html.py',92), ('factor -> WORD SPACE factor','factor',3,'p_factor_text','md2html.py',93),", "H4 factor','expression',2,'p_exp_cr','md2html.py',77), ('expression -> H5 factor','expression',2,'p_exp_cr','md2html.py',78), ('expression -> H6 factor','expression',2,'p_exp_cr','md2html.py',79), ('factor -> WORD','factor',1,'p_factor_text','md2html.py',92),", "_lr_action = { } for _k, _v in _lr_action_items.items(): for _x,_y in zip(_v[0],_v[1]):", "= { } for _k, _v in _lr_goto_items.items(): for _x,_y in zip(_v[0],_v[1]): if", "_tabversion = '3.2' _lr_method = 'LALR' _lr_signature = 'q\\xf5X\\xfc\\x8b\\xfa\\xfdP\\xca\\xd7\\xc4c\\xe9Bv\\x05' _lr_action_items = {'WORD':([2,3,4,5,6,7,18,],[10,10,10,10,10,10,10,]),'SPACE':([10,],[18,]),'H2':([0,17,],[2,2,]),'H3':([0,17,],[3,3,]),'H1':([0,17,],[4,4,]),'H6':([0,17,],[5,5,]),'H4':([0,17,],[6,6,]),'H5':([0,17,],[7,7,]),'CR':([8,9,10,11,12,13,14,15,16,19,20,],[17,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),'$end':([1,8,9,10,11,12,13,14,15,16,19,20,],[0,-1,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),} _lr_action", "_x,_y in zip(_v[0],_v[1]): if not _x in _lr_goto: _lr_goto[_x] = { } _lr_goto[_x][_k]", "('body -> statement','body',1,'p_body','md2html.py',62), ('statement -> expression','statement',1,'p_state','md2html.py',66), ('statement -> statement CR expression','statement',3,'p_state','md2html.py',67), ('expression ->", "_lr_signature = 'q\\xf5X\\xfc\\x8b\\xfa\\xfdP\\xca\\xd7\\xc4c\\xe9Bv\\x05' _lr_action_items = 
{'WORD':([2,3,4,5,6,7,18,],[10,10,10,10,10,10,10,]),'SPACE':([10,],[18,]),'H2':([0,17,],[2,2,]),'H3':([0,17,],[3,3,]),'H1':([0,17,],[4,4,]),'H6':([0,17,],[5,5,]),'H4':([0,17,],[6,6,]),'H5':([0,17,],[7,7,]),'CR':([8,9,10,11,12,13,14,15,16,19,20,],[17,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),'$end':([1,8,9,10,11,12,13,14,15,16,19,20,],[0,-1,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),} _lr_action = { } for _k, _v", "not edit. _tabversion = '3.2' _lr_method = 'LALR' _lr_signature = 'q\\xf5X\\xfc\\x8b\\xfa\\xfdP\\xca\\xd7\\xc4c\\xe9Bv\\x05' _lr_action_items =", "in zip(_v[0],_v[1]): if not _x in _lr_action: _lr_action[_x] = { } _lr_action[_x][_k] =", "= { } for _k, _v in _lr_action_items.items(): for _x,_y in zip(_v[0],_v[1]): if", "in _lr_action_items.items(): for _x,_y in zip(_v[0],_v[1]): if not _x in _lr_action: _lr_action[_x] =", "('expression -> H4 factor','expression',2,'p_exp_cr','md2html.py',77), ('expression -> H5 factor','expression',2,'p_exp_cr','md2html.py',78), ('expression -> H6 factor','expression',2,'p_exp_cr','md2html.py',79), ('factor", "= { } _lr_goto[_x][_k] = _y del _lr_goto_items _lr_productions = [ (\"S' ->", "_v in _lr_action_items.items(): for _x,_y in zip(_v[0],_v[1]): if not _x in _lr_action: _lr_action[_x]", "edit. _tabversion = '3.2' _lr_method = 'LALR' _lr_signature = 'q\\xf5X\\xfc\\x8b\\xfa\\xfdP\\xca\\xd7\\xc4c\\xe9Bv\\x05' _lr_action_items = {'WORD':([2,3,4,5,6,7,18,],[10,10,10,10,10,10,10,]),'SPACE':([10,],[18,]),'H2':([0,17,],[2,2,]),'H3':([0,17,],[3,3,]),'H1':([0,17,],[4,4,]),'H6':([0,17,],[5,5,]),'H4':([0,17,],[6,6,]),'H5':([0,17,],[7,7,]),'CR':([8,9,10,11,12,13,14,15,16,19,20,],[17,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),'$end':([1,8,9,10,11,12,13,14,15,16,19,20,],[0,-1,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),}", "H1 factor','expression',2,'p_exp_cr','md2html.py',74), ('expression -> H2 factor','expression',2,'p_exp_cr','md2html.py',75), ('expression -> H3 factor','expression',2,'p_exp_cr','md2html.py',76), ('expression -> H4", "{ } for _k, _v in _lr_action_items.items(): for _x,_y in zip(_v[0],_v[1]): if not", "_lr_method = 'LALR' _lr_signature = 'q\\xf5X\\xfc\\x8b\\xfa\\xfdP\\xca\\xd7\\xc4c\\xe9Bv\\x05' _lr_action_items = {'WORD':([2,3,4,5,6,7,18,],[10,10,10,10,10,10,10,]),'SPACE':([10,],[18,]),'H2':([0,17,],[2,2,]),'H3':([0,17,],[3,3,]),'H1':([0,17,],[4,4,]),'H6':([0,17,],[5,5,]),'H4':([0,17,],[6,6,]),'H5':([0,17,],[7,7,]),'CR':([8,9,10,11,12,13,14,15,16,19,20,],[17,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),'$end':([1,8,9,10,11,12,13,14,15,16,19,20,],[0,-1,-2,-10,-5,-6,-4,-9,-7,-8,-3,-11,]),} _lr_action = { }", "Do not edit. _tabversion = '3.2' _lr_method = 'LALR' _lr_signature = 'q\\xf5X\\xfc\\x8b\\xfa\\xfdP\\xca\\xd7\\xc4c\\xe9Bv\\x05' _lr_action_items", "<reponame>wangxiaoying/Compiler-Project # parsetab.py # This file is automatically generated. Do not edit. _tabversion", "{ } for _k, _v in _lr_goto_items.items(): for _x,_y in zip(_v[0],_v[1]): if not" ]
[ "post_review_result.status_code != requests.codes.ok: print(post_review_result.text) # Ignore bad gateway errors (false negatives?) if post_review_result.status_code", "i + n] def _files_from_this_pr(github_api_url, repo, pull_request_id, github_token): \"\"\"Lists which files and lines", "= list(chunks(actual_comments, suggestions_per_comment)) total_reviews = len(actual_comments) current_review = 1 for comments_chunk in actual_comments:", "of 100 pages (3000 files) for page_num in range(1, 101): pull_files_url = \"%s/repos/%s/pulls/%d/files?page=%d\"", "PURPOSE. See the # GNU General Public License for more details. # #", "pull_files_chunk = json.loads(pull_files_result.text) if len(pull_files_chunk) == 0: break pull_request_files.extend(pull_files_chunk) files_and_lines_available_for_comments = dict() for", "requests.get( pull_comments_url, headers={ \"Accept\": \"application/vnd.github.v3+json\", \"Authorization\": \"token %s\" % github_token, }, ) if", "# Copyright (C) 2021 Canonical Ltd # # This program is free software:", "= list() # Request a maximum of 100 pages (3000 files) for page_num", "%s\" % github_token, }, ) if pull_comments_result.status_code != requests.codes.ok: print( \"Request to get", "\"body\": warning_comment, \"event\": review_event, \"comments\": comments_chunk, }, headers={ \"Accept\": \"application/vnd.github.v3+json\", \"Authorization\": \"token %s\"", "pull_request_file[\"filename\"] ] = lines_available_for_comments return files_and_lines_available_for_comments def post_pr_review_comments(repository: str, pull_request_id: int, review_comments: dict):", "github_api_url, repo, pull_request_id, page_num, ) pull_files_result = requests.get( pull_files_url, headers={ \"Accept\": \"application/vnd.github.v3+json\", \"Authorization\":", "a maximum of 100 pages (3000 comments) for page_num in range(1, 101): pull_comments_url", "pull_request_id, page_num, ) pull_comments_result = requests.get( pull_comments_url, headers={ \"Accept\": \"application/vnd.github.v3+json\", \"Authorization\": \"token %s\"", "chunks from lst.\"\"\" for i in range(0, len(lst), n): yield lst[i: i +", "the Free Software Foundation. # # This program is distributed in the hope", "2021 Canonical Ltd # # This program is free software: you can redistribute", "str(pull_files_result.status_code) ) return None pull_files_chunk = json.loads(pull_files_result.text) if len(pull_files_chunk) == 0: break pull_request_files.extend(pull_files_chunk)", "is None: print(\"Couldn't get the files of this PR from GitHub\") return 1", "= json.loads(pull_files_result.text) if len(pull_files_chunk) == 0: break pull_request_files.extend(pull_files_chunk) files_and_lines_available_for_comments = dict() for pull_request_file", "github_token) if files_and_lines_available_for_comments is None: print(\"Couldn't get the files of this PR from", ") pull_files_result = requests.get( pull_files_url, headers={ \"Accept\": \"application/vnd.github.v3+json\", \"Authorization\": \"token %s\" % github_token,", "pull_request_id.\"\"\" # # Copyright (C) 2021 Canonical Ltd # # This program is", "= dict() for pull_request_file in pull_request_files: # Not all PR file metadata entries", "(false negatives?) 
if post_review_result.status_code != requests.codes.bad_gateway: print( \"Posting review comments failed with error", "Copied from: https://stackoverflow.com/a/312464 \"\"\"Yield successive n-sized chunks from lst.\"\"\" for i in range(0,", "response for large reviews suggestions_per_comment = 15 actual_comments = list(chunks(actual_comments, suggestions_per_comment)) total_reviews =", "print(\"No new warnings found for this pull request.\") return 0 # Split the", "existing_pull_request_comments = list() # Request a maximum of 100 pages (3000 comments) for", ") print(\"Please report this error to the CI maintainer\") return 1 # Wait", "101): pull_files_url = \"%s/repos/%s/pulls/%d/files?page=%d\" % ( github_api_url, repo, pull_request_id, page_num, ) pull_files_result =", "it under the terms of the GNU General Public License version 3 as", "filter( lambda review_comment: not ( review_comment[\"path\"] == comment[\"path\"] and review_comment[\"line\"] == comment[\"line\"] and", "should have received a copy of the GNU General Public License # along", "the GNU General Public License # along with this program. If not, see", "pull_request_id, github_token): \"\"\"Lists which files and lines are allowed to receive comments, i.e.", "# Ignore comments on lines that were not changed in the pull request", "in this # pull request actual_comments = [ c for c in comments", "\"application/vnd.github.v3+json\", \"Authorization\": \"token %s\" % github_token, }, ) if pull_files_result.status_code != requests.codes.ok: print(", "pull_request_reviews_url = \"%s/repos/%s/pulls/%d/reviews\" % ( github_api_url, repository, pull_request_id, ) post_review_result = requests.post( pull_request_reviews_url,", "and the ones not affecting files modified in the current pull_request_id.\"\"\" # #", "License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # Source", "the existing review comments existing_pull_request_comments = list() # Request a maximum of 100", "dict() # Ignore comments on lines that were not changed in the pull", "without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR", "as a response for large reviews suggestions_per_comment = 15 actual_comments = list(chunks(actual_comments, suggestions_per_comment))", "if pull_files_result.status_code != requests.codes.ok: print( \"Request to get list of files failed with", "review_comments: dict): \"\"\" Posts a PR Review event from each 15 review_comments which", "modify # it under the terms of the GNU General Public License version", "Ignore bad gateway errors (false negatives?) 
if post_review_result.status_code != requests.codes.bad_gateway: print( \"Posting review", "files_and_lines_available_for_comments[c[\"path\"]] ] # Load the existing review comments existing_pull_request_comments = list() # Request", "this PR from GitHub\") return 1 # Dismanteling the review_comments object for filtering", "(3000 files) for page_num in range(1, 101): pull_files_url = \"%s/repos/%s/pulls/%d/files?page=%d\" % ( github_api_url,", "section # E.g., entries related to removed binary files may not contain it", "\"%s/repos/%s/pulls/%d/files?page=%d\" % ( github_api_url, repo, pull_request_id, page_num, ) pull_files_result = requests.get( pull_files_url, headers={", "\"event\": review_event, \"comments\": comments_chunk, }, headers={ \"Accept\": \"application/vnd.github.v3+json\", \"Authorization\": \"token %s\" % github_token,", "pull request review comments, excluding the existing ones and the ones not affecting", "= requests.get( pull_comments_url, headers={ \"Accept\": \"application/vnd.github.v3+json\", \"Authorization\": \"token %s\" % github_token, }, )", "= requests.get( pull_files_url, headers={ \"Accept\": \"application/vnd.github.v3+json\", \"Authorization\": \"token %s\" % github_token, }, )", "https://github.com/platisd/clang-tidy-pr-comments. import itertools import json import os import time import re import requests", "You should have received a copy of the GNU General Public License #", "current pull_request_id Pull Request.\"\"\" pull_request_files = list() # Request a maximum of 100", "print( \"Request to get list of files failed with error code: \" +", "get pull request comments failed with error code: \" + str(pull_comments_result.status_code) ) return", "new warnings found for this pull request.\") return 0 # Split the comments", "# Ignore bad gateway errors (false negatives?) if post_review_result.status_code != requests.codes.bad_gateway: print( \"Posting", "comment on as the files weren't changed in this # pull request actual_comments", "review_body = review_comments[\"body\"] review_event = review_comments[\"event\"] comments = review_comments[\"comments\"] actual_comments = dict() #", "git_line_tags = re.findall(r\"@@ -\\d+,\\d+ \\+\\d+,\\d+ @@\", pull_request_file[\"patch\"]) lines_and_changes = [ line_tag.replace(\"@@\", \"\").strip().split()[1].replace(\"+\", \"\")", "list(chunks(actual_comments, suggestions_per_comment)) total_reviews = len(actual_comments) current_review = 1 for comments_chunk in actual_comments: warning_comment", "Foundation. # # This program is distributed in the hope that it will", "a maximum of 100 pages (3000 files) for page_num in range(1, 101): pull_files_url", "details. # # You should have received a copy of the GNU General", "PR file metadata entries may contain a patch section # E.g., entries related", "return 1 pull_comments_chunk = json.loads(pull_comments_result.text) if len(pull_comments_chunk) == 0: break existing_pull_request_comments.extend(pull_comments_chunk) # Exclude", "re.findall(r\"@@ -\\d+,\\d+ \\+\\d+,\\d+ @@\", pull_request_file[\"patch\"]) lines_and_changes = [ line_tag.replace(\"@@\", \"\").strip().split()[1].replace(\"+\", \"\") for line_tag", "WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A", "and getting 502 server errors as a response for large reviews suggestions_per_comment =", "pull_request_files: # Not all PR file metadata entries may contain a patch section", "# Source code adapted from https://github.com/platisd/clang-tidy-pr-comments. 
import itertools import json import os import", "receive comments, i.e. those modified by the current pull_request_id Pull Request.\"\"\" pull_request_files =", "the files weren't changed in this # pull request actual_comments = [ c", "getting 502 server errors as a response for large reviews suggestions_per_comment = 15", "that it will be useful, # but WITHOUT ANY WARRANTY; without even the", "post_review_result.status_code != requests.codes.bad_gateway: print( \"Posting review comments failed with error code: \" +", "# Copied from: https://stackoverflow.com/a/312464 \"\"\"Yield successive n-sized chunks from lst.\"\"\" for i in", "lst[i: i + n] def _files_from_this_pr(github_api_url, repo, pull_request_id, github_token): \"\"\"Lists which files and", "github_token = os.environ.get(\"INPUT_GITHUB_TOKEN\") files_and_lines_available_for_comments = \\ _files_from_this_pr(github_api_url, repository, pull_request_id, github_token) if files_and_lines_available_for_comments is", "# along with this program. If not, see <http://www.gnu.org/licenses/>. # # Source code", "# This program is free software: you can redistribute it and/or modify #", "comment in existing_pull_request_comments: actual_comments = list( filter( lambda review_comment: not ( review_comment[\"path\"] ==", "allowed to receive comments, i.e. those modified by the current pull_request_id Pull Request.\"\"\"", "in range(0, len(lst), n): yield lst[i: i + n] def _files_from_this_pr(github_api_url, repo, pull_request_id,", "change in lines_and_changes ] lines_available_for_comments = list( itertools.chain.from_iterable(lines_available_for_comments) ) files_and_lines_available_for_comments[ pull_request_file[\"filename\"] ] =", "in range(1, 101): pull_files_url = \"%s/repos/%s/pulls/%d/files?page=%d\" % ( github_api_url, repo, pull_request_id, page_num, )", "n] def _files_from_this_pr(github_api_url, repo, pull_request_id, github_token): \"\"\"Lists which files and lines are allowed", "implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the", "to removed binary files may not contain it if \"patch\" not in pull_request_file:", "files failed with error code: \" + str(pull_files_result.status_code) ) return None pull_files_chunk =", "review_comments[\"comments\"] actual_comments = dict() # Ignore comments on lines that were not changed", "requests.post( pull_request_reviews_url, json={ \"body\": warning_comment, \"event\": review_event, \"comments\": comments_chunk, }, headers={ \"Accept\": \"application/vnd.github.v3+json\",", "return 0 # Split the comments in chunks to avoid overloading the server", "along with this program. If not, see <http://www.gnu.org/licenses/>. 
# # Source code adapted", "metadata entries may contain a patch section # E.g., entries related to removed", "files_and_lines_available_for_comments def post_pr_review_comments(repository: str, pull_request_id: int, review_comments: dict): \"\"\" Posts a PR Review", "= 15 actual_comments = list(chunks(actual_comments, suggestions_per_comment)) total_reviews = len(actual_comments) current_review = 1 for", "of the GNU General Public License version 3 as # published by the", "binary files may not contain it if \"patch\" not in pull_request_file: continue git_line_tags", "1 # Wait before posting all chunks so to avoid triggering abuse detection", "the hope that it will be useful, # but WITHOUT ANY WARRANTY; without", "for i in range(0, len(lst), n): yield lst[i: i + n] def _files_from_this_pr(github_api_url,", "it if \"patch\" not in pull_request_file: continue git_line_tags = re.findall(r\"@@ -\\d+,\\d+ \\+\\d+,\\d+ @@\",", "= \\ _files_from_this_pr(github_api_url, repository, pull_request_id, github_token) if files_and_lines_available_for_comments is None: print(\"Couldn't get the", "( github_api_url, repository, pull_request_id, page_num, ) pull_comments_result = requests.get( pull_comments_url, headers={ \"Accept\": \"application/vnd.github.v3+json\",", "filtering purposes. review_body = review_comments[\"body\"] review_event = review_comments[\"event\"] comments = review_comments[\"comments\"] actual_comments =", "[ c for c in comments if c[\"path\"] in files_and_lines_available_for_comments.keys() and c[\"line\"] in", "failed with error code: \" + str(pull_files_result.status_code) ) return None pull_files_chunk = json.loads(pull_files_result.text)", "pull_comments_url, headers={ \"Accept\": \"application/vnd.github.v3+json\", \"Authorization\": \"token %s\" % github_token, }, ) if pull_comments_result.status_code", "(3000 comments) for page_num in range(1, 101): pull_comments_url = \"%s/repos/%s/pulls/%d/comments?page=%d\" % ( github_api_url,", "def _files_from_this_pr(github_api_url, repo, pull_request_id, github_token): \"\"\"Lists which files and lines are allowed to", "for pull_request_file in pull_request_files: # Not all PR file metadata entries may contain", "and c[\"line\"] in files_and_lines_available_for_comments[c[\"path\"]] ] # Load the existing review comments existing_pull_request_comments =", "adapted from https://github.com/platisd/clang-tidy-pr-comments. import itertools import json import os import time import re", "# You should have received a copy of the GNU General Public License", "as # published by the Free Software Foundation. # # This program is", "comments) for page_num in range(1, 101): pull_comments_url = \"%s/repos/%s/pulls/%d/comments?page=%d\" % ( github_api_url, repository,", "== comment[\"path\"] and review_comment[\"line\"] == comment[\"line\"] and review_comment[\"side\"] == comment[\"side\"] and review_comment[\"body\"] ==", "-\\d+,\\d+ \\+\\d+,\\d+ @@\", pull_request_file[\"patch\"]) lines_and_changes = [ line_tag.replace(\"@@\", \"\").strip().split()[1].replace(\"+\", \"\") for line_tag in", "Dismanteling the review_comments object for filtering purposes. 
review_body = review_comments[\"body\"] review_event = review_comments[\"event\"]", "files may not contain it if \"patch\" not in pull_request_file: continue git_line_tags =", "print( \"Posting review comments failed with error code: \" + str(post_review_result.status_code) ) print(\"Please", "] = lines_available_for_comments return files_and_lines_available_for_comments def post_pr_review_comments(repository: str, pull_request_id: int, review_comments: dict): \"\"\"", "server errors as a response for large reviews suggestions_per_comment = 15 actual_comments =", "pull_request_files = list() # Request a maximum of 100 pages (3000 files) for", "\"Authorization\": \"token %s\" % github_token, }, ) if post_review_result.status_code != requests.codes.ok: print(post_review_result.text) #", "in git_line_tags ] lines_available_for_comments = [ list( range( int(change.split(\",\")[0]), int(change.split(\",\")[0]) + int(change.split(\",\")[1]), )", ") if post_review_result.status_code != requests.codes.ok: print(post_review_result.text) # Ignore bad gateway errors (false negatives?)", "for page_num in range(1, 101): pull_files_url = \"%s/repos/%s/pulls/%d/files?page=%d\" % ( github_api_url, repo, pull_request_id,", "code: \" + str(pull_comments_result.status_code) ) return 1 pull_comments_chunk = json.loads(pull_comments_result.text) if len(pull_comments_chunk) ==", "\"\") for line_tag in git_line_tags ] lines_available_for_comments = [ list( range( int(change.split(\",\")[0]), int(change.split(\",\")[0])", "the terms of the GNU General Public License version 3 as # published", "this error to the CI maintainer\") return 1 # Wait before posting all", "= \"%s/repos/%s/pulls/%d/reviews\" % ( github_api_url, repository, pull_request_id, ) post_review_result = requests.post( pull_request_reviews_url, json={", "print(\"Please report this error to the CI maintainer\") return 1 # Wait before", "pull_files_result.status_code != requests.codes.ok: print( \"Request to get list of files failed with error", "str(pull_comments_result.status_code) ) return 1 pull_comments_chunk = json.loads(pull_comments_result.text) if len(pull_comments_chunk) == 0: break existing_pull_request_comments.extend(pull_comments_chunk)", "repository, pull_request_id, github_token) if files_and_lines_available_for_comments is None: print(\"Couldn't get the files of this", "(%i/%i)\") % (current_review, total_reviews) ) current_review += 1 pull_request_reviews_url = \"%s/repos/%s/pulls/%d/reviews\" % (", "a copy of the GNU General Public License # along with this program.", "!= requests.codes.ok: print( \"Request to get list of files failed with error code:", "%s\" % github_token, }, ) if pull_files_result.status_code != requests.codes.ok: print( \"Request to get", "see <http://www.gnu.org/licenses/>. # # Source code adapted from https://github.com/platisd/clang-tidy-pr-comments. import itertools import json", "A PARTICULAR PURPOSE. See the # GNU General Public License for more details.", "purposes. review_body = review_comments[\"body\"] review_event = review_comments[\"event\"] comments = review_comments[\"comments\"] actual_comments = dict()", "post_review_result = requests.post( pull_request_reviews_url, json={ \"body\": warning_comment, \"event\": review_event, \"comments\": comments_chunk, }, headers={", "this program. If not, see <http://www.gnu.org/licenses/>. 
# # Source code adapted from https://github.com/platisd/clang-tidy-pr-comments.", "\"application/vnd.github.v3+json\", \"Authorization\": \"token %s\" % github_token, }, ) if pull_comments_result.status_code != requests.codes.ok: print(", "return 1 # Wait before posting all chunks so to avoid triggering abuse", "pull_request_file[\"patch\"]) lines_and_changes = [ line_tag.replace(\"@@\", \"\").strip().split()[1].replace(\"+\", \"\") for line_tag in git_line_tags ] lines_available_for_comments", "i.e. those modified by the current pull_request_id Pull Request.\"\"\" pull_request_files = list() #", "if c[\"path\"] in files_and_lines_available_for_comments.keys() and c[\"line\"] in files_and_lines_available_for_comments[c[\"path\"]] ] # Load the existing", "total_reviews) ) current_review += 1 pull_request_reviews_url = \"%s/repos/%s/pulls/%d/reviews\" % ( github_api_url, repository, pull_request_id,", "= list( itertools.chain.from_iterable(lines_available_for_comments) ) files_and_lines_available_for_comments[ pull_request_file[\"filename\"] ] = lines_available_for_comments return files_and_lines_available_for_comments def post_pr_review_comments(repository:", "n-sized chunks from lst.\"\"\" for i in range(0, len(lst), n): yield lst[i: i", "software: you can redistribute it and/or modify # it under the terms of", "under the terms of the GNU General Public License version 3 as #", "the comments in chunks to avoid overloading the server # and getting 502", "pull_request_files.extend(pull_files_chunk) files_and_lines_available_for_comments = dict() for pull_request_file in pull_request_files: # Not all PR file", "# Not all PR file metadata entries may contain a patch section #", "with error code: \" + str(pull_files_result.status_code) ) return None pull_files_chunk = json.loads(pull_files_result.text) if", "total_reviews = len(actual_comments) current_review = 1 for comments_chunk in actual_comments: warning_comment = (", "github_api_url, repository, pull_request_id, ) post_review_result = requests.post( pull_request_reviews_url, json={ \"body\": warning_comment, \"event\": review_event,", "by the Free Software Foundation. # # This program is distributed in the", "# GNU General Public License for more details. 
# # You should have", "int(change.split(\",\")[0]), int(change.split(\",\")[0]) + int(change.split(\",\")[1]), ) ) for change in lines_and_changes ] lines_available_for_comments =", "actual_comments: warning_comment = ( (review_body + \" (%i/%i)\") % (current_review, total_reviews) ) current_review", "files_and_lines_available_for_comments.keys() and c[\"line\"] in files_and_lines_available_for_comments[c[\"path\"]] ] # Load the existing review comments existing_pull_request_comments", "in existing_pull_request_comments: actual_comments = list( filter( lambda review_comment: not ( review_comment[\"path\"] == comment[\"path\"]", "pull request actual_comments = [ c for c in comments if c[\"path\"] in", "break existing_pull_request_comments.extend(pull_comments_chunk) # Exclude already posted comments for comment in existing_pull_request_comments: actual_comments =", "}, headers={ \"Accept\": \"application/vnd.github.v3+json\", \"Authorization\": \"token %s\" % github_token, }, ) if post_review_result.status_code", "request actual_comments = [ c for c in comments if c[\"path\"] in files_and_lines_available_for_comments.keys()", "comments if c[\"path\"] in files_and_lines_available_for_comments.keys() and c[\"line\"] in files_and_lines_available_for_comments[c[\"path\"]] ] # Load the", "Load the existing review comments existing_pull_request_comments = list() # Request a maximum of", "range(0, len(lst), n): yield lst[i: i + n] def _files_from_this_pr(github_api_url, repo, pull_request_id, github_token):", "with error code: \" + str(post_review_result.status_code) ) print(\"Please report this error to the", "error to the CI maintainer\") return 1 # Wait before posting all chunks", "to receive comments, i.e. those modified by the current pull_request_id Pull Request.\"\"\" pull_request_files", "errors (false negatives?) if post_review_result.status_code != requests.codes.bad_gateway: print( \"Posting review comments failed with", "it and/or modify # it under the terms of the GNU General Public", "% github_token, }, ) if pull_files_result.status_code != requests.codes.ok: print( \"Request to get list", "large reviews suggestions_per_comment = 15 actual_comments = list(chunks(actual_comments, suggestions_per_comment)) total_reviews = len(actual_comments) current_review", "comments failed with error code: \" + str(post_review_result.status_code) ) print(\"Please report this error", "excluding the existing ones and the ones not affecting files modified in the", "\\+\\d+,\\d+ @@\", pull_request_file[\"patch\"]) lines_and_changes = [ line_tag.replace(\"@@\", \"\").strip().split()[1].replace(\"+\", \"\") for line_tag in git_line_tags", "event from each 15 review_comments which matching the output of `files_and_lines_available_for_comments`\"\"\" github_api_url =", "on as the files weren't changed in this # pull request actual_comments =", "(review_body + \" (%i/%i)\") % (current_review, total_reviews) ) current_review += 1 pull_request_reviews_url =", "to the CI maintainer\") return 1 # Wait before posting all chunks so", "def post_pr_review_comments(repository: str, pull_request_id: int, review_comments: dict): \"\"\" Posts a PR Review event", "the existing ones and the ones not affecting files modified in the current", "free software: you can redistribute it and/or modify # it under the terms", "a response for large reviews suggestions_per_comment = 15 actual_comments = list(chunks(actual_comments, suggestions_per_comment)) total_reviews", "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the # GNU General", "to avoid overloading the server # and getting 502 server errors as a", "review_event = review_comments[\"event\"] comments = review_comments[\"comments\"] actual_comments = dict() # Ignore comments on", "this # pull request actual_comments = [ c for c in comments if", "which matching the output of `files_and_lines_available_for_comments`\"\"\" github_api_url = os.environ.get(\"GITHUB_API_URL\") github_token = os.environ.get(\"INPUT_GITHUB_TOKEN\") files_and_lines_available_for_comments", "\"Accept\": \"application/vnd.github.v3+json\", \"Authorization\": \"token %s\" % github_token, }, ) if post_review_result.status_code != requests.codes.ok:", "current pull_request_id.\"\"\" # # Copyright (C) 2021 Canonical Ltd # # This program", "%s\" % github_token, }, ) if post_review_result.status_code != requests.codes.ok: print(post_review_result.text) # Ignore bad", "None pull_files_chunk = json.loads(pull_files_result.text) if len(pull_files_chunk) == 0: break pull_request_files.extend(pull_files_chunk) files_and_lines_available_for_comments = dict()", "len(pull_files_chunk) == 0: break pull_request_files.extend(pull_files_chunk) files_and_lines_available_for_comments = dict() for pull_request_file in pull_request_files: #", "Request a maximum of 100 pages (3000 comments) for page_num in range(1, 101):", "PR Review event from each 15 review_comments which matching the output of `files_and_lines_available_for_comments`\"\"\"", "changed in the pull request # Remove entries we cannot comment on as", "% github_token, }, ) if post_review_result.status_code != requests.codes.ok: print(post_review_result.text) # Ignore bad gateway", "files weren't changed in this # pull request actual_comments = [ c for", "\" + str(post_review_result.status_code) ) print(\"Please report this error to the CI maintainer\") return", "continue git_line_tags = re.findall(r\"@@ -\\d+,\\d+ \\+\\d+,\\d+ @@\", pull_request_file[\"patch\"]) lines_and_changes = [ line_tag.replace(\"@@\", \"\").strip().split()[1].replace(\"+\",", "break pull_request_files.extend(pull_files_chunk) files_and_lines_available_for_comments = dict() for pull_request_file in pull_request_files: # Not all PR", ") for change in lines_and_changes ] lines_available_for_comments = list( itertools.chain.from_iterable(lines_available_for_comments) ) files_and_lines_available_for_comments[ pull_request_file[\"filename\"]", "from each 15 review_comments which matching the output of `files_and_lines_available_for_comments`\"\"\" github_api_url = os.environ.get(\"GITHUB_API_URL\")", "return files_and_lines_available_for_comments def post_pr_review_comments(repository: str, pull_request_id: int, review_comments: dict): \"\"\" Posts a PR", "\"Accept\": \"application/vnd.github.v3+json\", \"Authorization\": \"token %s\" % github_token, }, ) if pull_files_result.status_code != requests.codes.ok:", "= os.environ.get(\"GITHUB_API_URL\") github_token = os.environ.get(\"INPUT_GITHUB_TOKEN\") files_and_lines_available_for_comments = \\ _files_from_this_pr(github_api_url, repository, pull_request_id, github_token) if", "\"Authorization\": \"token %s\" % github_token, }, ) if pull_files_result.status_code != requests.codes.ok: print( \"Request", "review_comment[\"path\"] == comment[\"path\"] and review_comment[\"line\"] == comment[\"line\"] and review_comment[\"side\"] == comment[\"side\"] and review_comment[\"body\"]", "dict): \"\"\" Posts a PR Review event from each 15 review_comments which matching", "comments for comment in 
existing_pull_request_comments: actual_comments = list( filter( lambda review_comment: not (", "actual_comments = [ c for c in comments if c[\"path\"] in files_and_lines_available_for_comments.keys() and", "pull request # Remove entries we cannot comment on as the files weren't", "not contain it if \"patch\" not in pull_request_file: continue git_line_tags = re.findall(r\"@@ -\\d+,\\d+", "warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #", "from https://github.com/platisd/clang-tidy-pr-comments. import itertools import json import os import time import re import", "a PR Review event from each 15 review_comments which matching the output of", "\" + str(pull_comments_result.status_code) ) return 1 pull_comments_chunk = json.loads(pull_comments_result.text) if len(pull_comments_chunk) == 0:", "the files of this PR from GitHub\") return 1 # Dismanteling the review_comments", "if len(pull_comments_chunk) == 0: break existing_pull_request_comments.extend(pull_comments_chunk) # Exclude already posted comments for comment", "General Public License version 3 as # published by the Free Software Foundation.", "the CI maintainer\") return 1 # Wait before posting all chunks so to", "files and lines are allowed to receive comments, i.e. those modified by the", "review_comments which matching the output of `files_and_lines_available_for_comments`\"\"\" github_api_url = os.environ.get(\"GITHUB_API_URL\") github_token = os.environ.get(\"INPUT_GITHUB_TOKEN\")", "program is free software: you can redistribute it and/or modify # it under", "+ \" (%i/%i)\") % (current_review, total_reviews) ) current_review += 1 pull_request_reviews_url = \"%s/repos/%s/pulls/%d/reviews\"", "# pull request actual_comments = [ c for c in comments if c[\"path\"]", "files_and_lines_available_for_comments = \\ _files_from_this_pr(github_api_url, repository, pull_request_id, github_token) if files_and_lines_available_for_comments is None: print(\"Couldn't get", "failed with error code: \" + str(post_review_result.status_code) ) print(\"Please report this error to", "warning_comment, \"event\": review_event, \"comments\": comments_chunk, }, headers={ \"Accept\": \"application/vnd.github.v3+json\", \"Authorization\": \"token %s\" %", "files modified in the current pull_request_id.\"\"\" # # Copyright (C) 2021 Canonical Ltd", "E.g., entries related to removed binary files may not contain it if \"patch\"", "get the files of this PR from GitHub\") return 1 # Dismanteling the", "modified in the current pull_request_id.\"\"\" # # Copyright (C) 2021 Canonical Ltd #", "100 pages (3000 files) for page_num in range(1, 101): pull_files_url = \"%s/repos/%s/pulls/%d/files?page=%d\" %", "modified by the current pull_request_id Pull Request.\"\"\" pull_request_files = list() # Request a", "page_num, ) pull_files_result = requests.get( pull_files_url, headers={ \"Accept\": \"application/vnd.github.v3+json\", \"Authorization\": \"token %s\" %", "each 15 review_comments which matching the output of `files_and_lines_available_for_comments`\"\"\" github_api_url = os.environ.get(\"GITHUB_API_URL\") github_token", "error code: \" + str(pull_comments_result.status_code) ) return 1 pull_comments_chunk = json.loads(pull_comments_result.text) if len(pull_comments_chunk)", "% github_token, }, ) if pull_comments_result.status_code != requests.codes.ok: print( \"Request to get pull", "range(1, 101): pull_files_url = \"%s/repos/%s/pulls/%d/files?page=%d\" % ( github_api_url, repo, pull_request_id, page_num, ) pull_files_result", ") ) 
for change in lines_and_changes ] lines_available_for_comments = list( itertools.chain.from_iterable(lines_available_for_comments) ) files_and_lines_available_for_comments[", "+ str(post_review_result.status_code) ) print(\"Please report this error to the CI maintainer\") return 1", "n): yield lst[i: i + n] def _files_from_this_pr(github_api_url, repo, pull_request_id, github_token): \"\"\"Lists which", "page_num, ) pull_comments_result = requests.get( pull_comments_url, headers={ \"Accept\": \"application/vnd.github.v3+json\", \"Authorization\": \"token %s\" %", "list() # Request a maximum of 100 pages (3000 comments) for page_num in", "request comments failed with error code: \" + str(pull_comments_result.status_code) ) return 1 pull_comments_chunk", "existing_pull_request_comments.extend(pull_comments_chunk) # Exclude already posted comments for comment in existing_pull_request_comments: actual_comments = list(", "already posted comments for comment in existing_pull_request_comments: actual_comments = list( filter( lambda review_comment:", "negatives?) if post_review_result.status_code != requests.codes.bad_gateway: print( \"Posting review comments failed with error code:", "it will be useful, # but WITHOUT ANY WARRANTY; without even the implied", "files_and_lines_available_for_comments[ pull_request_file[\"filename\"] ] = lines_available_for_comments return files_and_lines_available_for_comments def post_pr_review_comments(repository: str, pull_request_id: int, review_comments:", "Review event from each 15 review_comments which matching the output of `files_and_lines_available_for_comments`\"\"\" github_api_url", "https://stackoverflow.com/a/312464 \"\"\"Yield successive n-sized chunks from lst.\"\"\" for i in range(0, len(lst), n):", "pages (3000 files) for page_num in range(1, 101): pull_files_url = \"%s/repos/%s/pulls/%d/files?page=%d\" % (", "for comment in existing_pull_request_comments: actual_comments = list( filter( lambda review_comment: not ( review_comment[\"path\"]", "review comments failed with error code: \" + str(post_review_result.status_code) ) print(\"Please report this", "101): pull_comments_url = \"%s/repos/%s/pulls/%d/comments?page=%d\" % ( github_api_url, repository, pull_request_id, page_num, ) pull_comments_result =", "matching the output of `files_and_lines_available_for_comments`\"\"\" github_api_url = os.environ.get(\"GITHUB_API_URL\") github_token = os.environ.get(\"INPUT_GITHUB_TOKEN\") files_and_lines_available_for_comments =", "report this error to the CI maintainer\") return 1 # Wait before posting", "found for this pull request.\") return 0 # Split the comments in chunks", "not in pull_request_file: continue git_line_tags = re.findall(r\"@@ -\\d+,\\d+ \\+\\d+,\\d+ @@\", pull_request_file[\"patch\"]) lines_and_changes =", "the output of `files_and_lines_available_for_comments`\"\"\" github_api_url = os.environ.get(\"GITHUB_API_URL\") github_token = os.environ.get(\"INPUT_GITHUB_TOKEN\") files_and_lines_available_for_comments = \\", "General Public License for more details. 
#!/usr/bin/python3
"""Posts pull request review comments, excluding the existing ones and the
ones not affecting files modified in the current pull_request_id."""
#
# Copyright (C) 2021 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
# Source code adapted from https://github.com/platisd/clang-tidy-pr-comments.

import itertools
import json
import os
import re
import time

import requests


def chunks(lst, n):
    # Copied from: https://stackoverflow.com/a/312464
    """Yield successive n-sized chunks from lst."""
    for i in range(0, len(lst), n):
        yield lst[i: i + n]


def _files_from_this_pr(github_api_url, repo, pull_request_id, github_token):
    """Lists which files and lines are allowed to receive comments, i.e.
    those modified by the current pull_request_id Pull Request."""
    pull_request_files = list()
    # Request a maximum of 100 pages (3000 files)
    for page_num in range(1, 101):
        pull_files_url = "%s/repos/%s/pulls/%d/files?page=%d" % (
            github_api_url,
            repo,
            pull_request_id,
            page_num,
        )
        pull_files_result = requests.get(
            pull_files_url,
            headers={
                "Accept": "application/vnd.github.v3+json",
                "Authorization": "token %s" % github_token,
            },
        )

        if pull_files_result.status_code != requests.codes.ok:
            print(
                "Request to get list of files failed with error code: "
                + str(pull_files_result.status_code)
            )
            return None

        pull_files_chunk = json.loads(pull_files_result.text)

        if len(pull_files_chunk) == 0:
            break

        pull_request_files.extend(pull_files_chunk)

    files_and_lines_available_for_comments = dict()
    for pull_request_file in pull_request_files:
        # Not all PR file metadata entries may contain a patch section
        # E.g., entries related to removed binary files may not contain it
        if "patch" not in pull_request_file:
            continue

        git_line_tags = re.findall(r"@@ -\d+,\d+ \+\d+,\d+ @@", pull_request_file["patch"])
        lines_and_changes = [
            line_tag.replace("@@", "").strip().split()[1].replace("+", "")
            for line_tag in git_line_tags
        ]
        lines_available_for_comments = [
            list(
                range(
                    int(change.split(",")[0]),
                    int(change.split(",")[0]) + int(change.split(",")[1]),
                )
            )
            for change in lines_and_changes
        ]
        lines_available_for_comments = list(
            itertools.chain.from_iterable(lines_available_for_comments)
        )
        files_and_lines_available_for_comments[
            pull_request_file["filename"]
        ] = lines_available_for_comments

    return files_and_lines_available_for_comments


def post_pr_review_comments(repository: str, pull_request_id: int, review_comments: dict):
    """Posts a PR Review event for every chunk of 15 review_comments, keeping only
    the comments matching the output of `files_and_lines_available_for_comments`."""
    github_api_url = os.environ.get("GITHUB_API_URL")
    github_token = os.environ.get("INPUT_GITHUB_TOKEN")

    files_and_lines_available_for_comments = \
        _files_from_this_pr(github_api_url, repository, pull_request_id, github_token)
    if files_and_lines_available_for_comments is None:
        print("Couldn't get the files of this PR from GitHub")
        return 1

    # Dismantling the review_comments object for filtering purposes.
    review_body = review_comments["body"]
    review_event = review_comments["event"]
    comments = review_comments["comments"]

    actual_comments = dict()
    # Ignore comments on lines that were not changed in the pull request
    # Remove entries we cannot comment on as the files weren't changed in this
    # pull request
    actual_comments = [
        c
        for c in comments
        if c["path"] in files_and_lines_available_for_comments.keys()
        and c["line"] in files_and_lines_available_for_comments[c["path"]]
    ]

    # Load the existing review comments
    existing_pull_request_comments = list()
    # Request a maximum of 100 pages (3000 comments)
    for page_num in range(1, 101):
        pull_comments_url = "%s/repos/%s/pulls/%d/comments?page=%d" % (
            github_api_url,
            repository,
            pull_request_id,
            page_num,
        )
        pull_comments_result = requests.get(
            pull_comments_url,
            headers={
                "Accept": "application/vnd.github.v3+json",
                "Authorization": "token %s" % github_token,
            },
        )

        if pull_comments_result.status_code != requests.codes.ok:
            print(
                "Request to get pull request comments failed with error code: "
                + str(pull_comments_result.status_code)
            )
            return 1

        pull_comments_chunk = json.loads(pull_comments_result.text)

        if len(pull_comments_chunk) == 0:
            break

        existing_pull_request_comments.extend(pull_comments_chunk)

    # Exclude already posted comments
    for comment in existing_pull_request_comments:
        actual_comments = list(
            filter(
                lambda review_comment: not (
                    review_comment["path"] == comment["path"]
                    and review_comment["line"] == comment["line"]
                    and review_comment["side"] == comment["side"]
                    and review_comment["body"] == comment["body"]
                ),
                actual_comments,
            )
        )

    if len(actual_comments) == 0:
        print("No new warnings found for this pull request.")
        return 0

    # Split the comments in chunks to avoid overloading the server
    # and getting 502 server errors as a response for large reviews
    suggestions_per_comment = 15
    actual_comments = list(chunks(actual_comments, suggestions_per_comment))
    total_reviews = len(actual_comments)
    current_review = 1
    for comments_chunk in actual_comments:
        warning_comment = (
            (review_body + " (%i/%i)") % (current_review, total_reviews)
        )
        current_review += 1

        pull_request_reviews_url = "%s/repos/%s/pulls/%d/reviews" % (
            github_api_url,
            repository,
            pull_request_id,
        )
        post_review_result = requests.post(
            pull_request_reviews_url,
            json={
                "body": warning_comment,
                "event": review_event,
                "comments": comments_chunk,
            },
            headers={
                "Accept": "application/vnd.github.v3+json",
                "Authorization": "token %s" % github_token,
            },
        )

        if post_review_result.status_code != requests.codes.ok:
            print(post_review_result.text)
            # Ignore bad gateway errors (false negatives?)
            if post_review_result.status_code != requests.codes.bad_gateway:
                print(
                    "Posting review comments failed with error code: "
                    + str(post_review_result.status_code)
                )
                print("Please report this error to the CI maintainer")
                return 1
        # Wait between posting chunks so as to avoid triggering abuse detection
        time.sleep(5)

    return 0
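
# --- Usage sketch (not part of the original script) --------------------------
# A minimal example of how post_pr_review_comments() could be invoked, assuming
# GITHUB_API_URL and INPUT_GITHUB_TOKEN are already set in the environment. The
# repository name, pull request number and comment contents below are
# hypothetical; the payload only illustrates the {"body", "event", "comments"}
# shape that the function dismantles and filters above.
if __name__ == "__main__":
    example_review = {
        "body": "Static analysis found issues in the changed files",
        "event": "COMMENT",
        "comments": [
            {
                "path": "src/main.cpp",  # must be a file changed in the PR
                "line": 42,              # must be a line covered by the PR diff
                "side": "RIGHT",
                "body": "Example warning text for this line.",
            }
        ],
    }
    raise SystemExit(
        post_pr_review_comments("example-org/example-repo", 123, example_review)
    )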
[ "for path, handler in self.routes.items(): parse_result = parse(path, request_path) if parse_result is not", "404 response.text = \"Not found\" def find_handler(self, request_path): for path, handler in self.routes.items():", "is not None: if inspect.isclass(handler): handler = getattr(handler(), request.method.lower(), None) if handler is", "import WSGIAdapter as RequestsWSGIAdapter import os from jinja2 import Environment, FileSystemLoader from whitenoise", "found\" def find_handler(self, request_path): for path, handler in self.routes.items(): parse_result = parse(path, request_path)", "def __call__(self, environ, start_response): path_info = environ[\"PATH_INFO\"] if request_for_static(path_info, self._static_root): environ[\"PATH_INFO\"] = cut_static_root(path_info,", "WSGIAdapter as RequestsWSGIAdapter import os from jinja2 import Environment, FileSystemLoader from whitenoise import", "= Request(environ) response = self.handle_request(request) return response(environ, start_response) def __call__(self, environ, start_response): path_info", "route(self, path): def wrapper(handler): self.add_route(path, handler) return handler return wrapper def add_route(self, path,", "session = RequestsSession() session.mount(prefix=base_url, adapter=RequestsWSGIAdapter(self)) return session def handle_request(self, request): response = Response()", "from parse import parse import inspect from requests import Session as RequestsSession from", "def find_handler(self, request_path): for path, handler in self.routes.items(): parse_result = parse(path, request_path) if", "start_response): request = Request(environ) response = self.handle_request(request) return response(environ, start_response) def __call__(self, environ,", "= cut_static_root(path_info, self._static_root) return self.whitenoise(environ, start_response) return self.middleware(environ, start_response) def add_middleware(self, middleware_cls): self.middleware.add(middleware_cls)", "parse(path, request_path) if parse_result is not None: return handler, parse_result.named return None, None", "return handler return wrapper def add_route(self, path, handler): assert path not in self.routes,", "handler, kwargs = self.find_handler(request_path=request.path) try: if handler is not None: if inspect.isclass(handler): handler", "self.static_dir = os.path.abspath(static_dir) self._static_root = \"/static\" self.middleware = Middleware(self) def wsgi_app(self, environ, start_response):", "not allowed\", request.method) handler(request, response, **kwargs) else: self.default_response(response) except Exception as e: if", "= environ[\"PATH_INFO\"] if request_for_static(path_info, self._static_root): environ[\"PATH_INFO\"] = cut_static_root(path_info, self._static_root) return self.whitenoise(environ, start_response) return", "self.whitenoise = WhiteNoise(self.wsgi_app, root=static_dir) self.static_dir = os.path.abspath(static_dir) self._static_root = \"/static\" self.middleware = Middleware(self)", "Middleware(self) def wsgi_app(self, environ, start_response): request = Request(environ) response = self.handle_request(request) return response(environ,", "def handle_request(self, request): response = Response() handler, kwargs = self.find_handler(request_path=request.path) try: if handler", "e else: self.exception_handler(request, response, e) return response def default_response(self, response): response.status_code = 404", "wrapper(handler): self.add_route(path, handler) return handler return wrapper def add_route(self, path, handler): assert path", "= 
404 response.text = \"Not found\" def find_handler(self, request_path): for path, handler in", "self.exception_handler is None: raise e else: self.exception_handler(request, response, e) return response def default_response(self,", "is None: raise e else: self.exception_handler(request, response, e) return response def default_response(self, response):", "handler is not None: if inspect.isclass(handler): handler = getattr(handler(), request.method.lower(), None) if handler", "is None: context = {} return self.templates_env.get_template(template_name).render(**context) def add_exception_handler(self, exception_handler): self.exception_handler = exception_handler", "in not allowed\", request.method) handler(request, response, **kwargs) else: self.default_response(response) except Exception as e:", "request_for_static class API: def __init__(self, templates_dir=\"templates\", static_dir=\"static\"): self.routes = {} self.templates_env = Environment(loader=FileSystemLoader(os.path.abspath(templates_dir)))", "environ[\"PATH_INFO\"] if request_for_static(path_info, self._static_root): environ[\"PATH_INFO\"] = cut_static_root(path_info, self._static_root) return self.whitenoise(environ, start_response) return self.middleware(environ,", "handler return wrapper def add_route(self, path, handler): assert path not in self.routes, f\"{path}", "getattr(handler(), request.method.lower(), None) if handler is None: raise AttributeError(\"Method in not allowed\", request.method)", "if handler is not None: if inspect.isclass(handler): handler = getattr(handler(), request.method.lower(), None) if", "start_response) def __call__(self, environ, start_response): path_info = environ[\"PATH_INFO\"] if request_for_static(path_info, self._static_root): environ[\"PATH_INFO\"] =", "context is None: context = {} return self.templates_env.get_template(template_name).render(**context) def add_exception_handler(self, exception_handler): self.exception_handler =", "= os.path.abspath(static_dir) self._static_root = \"/static\" self.middleware = Middleware(self) def wsgi_app(self, environ, start_response): request", "middleware_cls): self.middleware.add(middleware_cls) def route(self, path): def wrapper(handler): self.add_route(path, handler) return handler return wrapper", "session def handle_request(self, request): response = Response() handler, kwargs = self.find_handler(request_path=request.path) try: if", "kwargs = self.find_handler(request_path=request.path) try: if handler is not None: if inspect.isclass(handler): handler =", "parse_result = parse(path, request_path) if parse_result is not None: return handler, parse_result.named return", "return response(environ, start_response) def __call__(self, environ, start_response): path_info = environ[\"PATH_INFO\"] if request_for_static(path_info, self._static_root):", "import parse import inspect from requests import Session as RequestsSession from wsgiadapter import", "wsgiadapter import WSGIAdapter as RequestsWSGIAdapter import os from jinja2 import Environment, FileSystemLoader from", "as RequestsWSGIAdapter import os from jinja2 import Environment, FileSystemLoader from whitenoise import WhiteNoise", "as RequestsSession from wsgiadapter import WSGIAdapter as RequestsWSGIAdapter import os from jinja2 import", "response.status_code = 404 response.text = \"Not found\" def find_handler(self, request_path): for path, handler", "if parse_result is not None: return handler, parse_result.named return None, None def template(self,", "handler, parse_result.named return None, 
None def template(self, template_name, context=None): if context is None:", "return None, None def template(self, template_name, context=None): if context is None: context =", "handler def test_session(self,base_url=\"http:''testserver\"): session = RequestsSession() session.mount(prefix=base_url, adapter=RequestsWSGIAdapter(self)) return session def handle_request(self, request):", "import Session as RequestsSession from wsgiadapter import WSGIAdapter as RequestsWSGIAdapter import os from", "if handler is None: raise AttributeError(\"Method in not allowed\", request.method) handler(request, response, **kwargs)", "= self.find_handler(request_path=request.path) try: if handler is not None: if inspect.isclass(handler): handler = getattr(handler(),", "handler): assert path not in self.routes, f\"{path} already exists.\" self.routes[path] = handler def", "os from jinja2 import Environment, FileSystemLoader from whitenoise import WhiteNoise from middleware import", "FileSystemLoader from whitenoise import WhiteNoise from middleware import Middleware from static import cut_static_root,", "path not in self.routes, f\"{path} already exists.\" self.routes[path] = handler def test_session(self,base_url=\"http:''testserver\"): session", "return session def handle_request(self, request): response = Response() handler, kwargs = self.find_handler(request_path=request.path) try:", "return handler, parse_result.named return None, None def template(self, template_name, context=None): if context is", "f\"{path} already exists.\" self.routes[path] = handler def test_session(self,base_url=\"http:''testserver\"): session = RequestsSession() session.mount(prefix=base_url, adapter=RequestsWSGIAdapter(self))", "in self.routes, f\"{path} already exists.\" self.routes[path] = handler def test_session(self,base_url=\"http:''testserver\"): session = RequestsSession()", "path_info = environ[\"PATH_INFO\"] if request_for_static(path_info, self._static_root): environ[\"PATH_INFO\"] = cut_static_root(path_info, self._static_root) return self.whitenoise(environ, start_response)", "RequestsSession() session.mount(prefix=base_url, adapter=RequestsWSGIAdapter(self)) return session def handle_request(self, request): response = Response() handler, kwargs", "request_path) if parse_result is not None: return handler, parse_result.named return None, None def", "import os from jinja2 import Environment, FileSystemLoader from whitenoise import WhiteNoise from middleware", "context=None): if context is None: context = {} return self.templates_env.get_template(template_name).render(**context) def add_exception_handler(self, exception_handler):", "Middleware from static import cut_static_root, request_for_static class API: def __init__(self, templates_dir=\"templates\", static_dir=\"static\"): self.routes", "handle_request(self, request): response = Response() handler, kwargs = self.find_handler(request_path=request.path) try: if handler is", "from middleware import Middleware from static import cut_static_root, request_for_static class API: def __init__(self,", "AttributeError(\"Method in not allowed\", request.method) handler(request, response, **kwargs) else: self.default_response(response) except Exception as", "in self.routes.items(): parse_result = parse(path, request_path) if parse_result is not None: return handler,", "environ, start_response): path_info = environ[\"PATH_INFO\"] if request_for_static(path_info, self._static_root): environ[\"PATH_INFO\"] = cut_static_root(path_info, self._static_root) return", "response): 
response.status_code = 404 response.text = \"Not found\" def find_handler(self, request_path): for path,", "start_response) return self.middleware(environ, start_response) def add_middleware(self, middleware_cls): self.middleware.add(middleware_cls) def route(self, path): def wrapper(handler):", "self.find_handler(request_path=request.path) try: if handler is not None: if inspect.isclass(handler): handler = getattr(handler(), request.method.lower(),", "assert path not in self.routes, f\"{path} already exists.\" self.routes[path] = handler def test_session(self,base_url=\"http:''testserver\"):", "path, handler): assert path not in self.routes, f\"{path} already exists.\" self.routes[path] = handler", "static_dir=\"static\"): self.routes = {} self.templates_env = Environment(loader=FileSystemLoader(os.path.abspath(templates_dir))) self.exception_handler = None self.whitenoise = WhiteNoise(self.wsgi_app,", "allowed\", request.method) handler(request, response, **kwargs) else: self.default_response(response) except Exception as e: if self.exception_handler", "request = Request(environ) response = self.handle_request(request) return response(environ, start_response) def __call__(self, environ, start_response):", "default_response(self, response): response.status_code = 404 response.text = \"Not found\" def find_handler(self, request_path): for", "self.routes[path] = handler def test_session(self,base_url=\"http:''testserver\"): session = RequestsSession() session.mount(prefix=base_url, adapter=RequestsWSGIAdapter(self)) return session def", "import Environment, FileSystemLoader from whitenoise import WhiteNoise from middleware import Middleware from static", "test_session(self,base_url=\"http:''testserver\"): session = RequestsSession() session.mount(prefix=base_url, adapter=RequestsWSGIAdapter(self)) return session def handle_request(self, request): response =", "= parse(path, request_path) if parse_result is not None: return handler, parse_result.named return None,", "if request_for_static(path_info, self._static_root): environ[\"PATH_INFO\"] = cut_static_root(path_info, self._static_root) return self.whitenoise(environ, start_response) return self.middleware(environ, start_response)", "parse_result.named return None, None def template(self, template_name, context=None): if context is None: context", "from webob import Request, Response from parse import parse import inspect from requests", "import WhiteNoise from middleware import Middleware from static import cut_static_root, request_for_static class API:", "raise e else: self.exception_handler(request, response, e) return response def default_response(self, response): response.status_code =", "= RequestsSession() session.mount(prefix=base_url, adapter=RequestsWSGIAdapter(self)) return session def handle_request(self, request): response = Response() handler,", "handler in self.routes.items(): parse_result = parse(path, request_path) if parse_result is not None: return", "WhiteNoise(self.wsgi_app, root=static_dir) self.static_dir = os.path.abspath(static_dir) self._static_root = \"/static\" self.middleware = Middleware(self) def wsgi_app(self,", "response def default_response(self, response): response.status_code = 404 response.text = \"Not found\" def find_handler(self,", "self.middleware = Middleware(self) def wsgi_app(self, environ, start_response): request = Request(environ) response = self.handle_request(request)", "__init__(self, templates_dir=\"templates\", static_dir=\"static\"): self.routes = {} self.templates_env = 
Environment(loader=FileSystemLoader(os.path.abspath(templates_dir))) self.exception_handler = None self.whitenoise", "def default_response(self, response): response.status_code = 404 response.text = \"Not found\" def find_handler(self, request_path):", "e: if self.exception_handler is None: raise e else: self.exception_handler(request, response, e) return response", "= None self.whitenoise = WhiteNoise(self.wsgi_app, root=static_dir) self.static_dir = os.path.abspath(static_dir) self._static_root = \"/static\" self.middleware", "is not None: return handler, parse_result.named return None, None def template(self, template_name, context=None):", "= Middleware(self) def wsgi_app(self, environ, start_response): request = Request(environ) response = self.handle_request(request) return", "handler = getattr(handler(), request.method.lower(), None) if handler is None: raise AttributeError(\"Method in not", "def route(self, path): def wrapper(handler): self.add_route(path, handler) return handler return wrapper def add_route(self,", "__call__(self, environ, start_response): path_info = environ[\"PATH_INFO\"] if request_for_static(path_info, self._static_root): environ[\"PATH_INFO\"] = cut_static_root(path_info, self._static_root)", "wsgi_app(self, environ, start_response): request = Request(environ) response = self.handle_request(request) return response(environ, start_response) def", "= \"/static\" self.middleware = Middleware(self) def wsgi_app(self, environ, start_response): request = Request(environ) response", "environ[\"PATH_INFO\"] = cut_static_root(path_info, self._static_root) return self.whitenoise(environ, start_response) return self.middleware(environ, start_response) def add_middleware(self, middleware_cls):", "None, None def template(self, template_name, context=None): if context is None: context = {}", "return wrapper def add_route(self, path, handler): assert path not in self.routes, f\"{path} already", "Response from parse import parse import inspect from requests import Session as RequestsSession", "from requests import Session as RequestsSession from wsgiadapter import WSGIAdapter as RequestsWSGIAdapter import", "Environment, FileSystemLoader from whitenoise import WhiteNoise from middleware import Middleware from static import", "import cut_static_root, request_for_static class API: def __init__(self, templates_dir=\"templates\", static_dir=\"static\"): self.routes = {} self.templates_env", "WhiteNoise from middleware import Middleware from static import cut_static_root, request_for_static class API: def", "RequestsWSGIAdapter import os from jinja2 import Environment, FileSystemLoader from whitenoise import WhiteNoise from", "self.add_route(path, handler) return handler return wrapper def add_route(self, path, handler): assert path not", "= handler def test_session(self,base_url=\"http:''testserver\"): session = RequestsSession() session.mount(prefix=base_url, adapter=RequestsWSGIAdapter(self)) return session def handle_request(self,", "add_route(self, path, handler): assert path not in self.routes, f\"{path} already exists.\" self.routes[path] =", "session.mount(prefix=base_url, adapter=RequestsWSGIAdapter(self)) return session def handle_request(self, request): response = Response() handler, kwargs =", "None: if inspect.isclass(handler): handler = getattr(handler(), request.method.lower(), None) if handler is None: raise", "inspect.isclass(handler): handler = getattr(handler(), request.method.lower(), None) if handler is None: raise AttributeError(\"Method in", "is None: raise 
AttributeError(\"Method in not allowed\", request.method) handler(request, response, **kwargs) else: self.default_response(response)", "return self.middleware(environ, start_response) def add_middleware(self, middleware_cls): self.middleware.add(middleware_cls) def route(self, path): def wrapper(handler): self.add_route(path,", "not None: return handler, parse_result.named return None, None def template(self, template_name, context=None): if", "request.method) handler(request, response, **kwargs) else: self.default_response(response) except Exception as e: if self.exception_handler is", "response, e) return response def default_response(self, response): response.status_code = 404 response.text = \"Not", "= getattr(handler(), request.method.lower(), None) if handler is None: raise AttributeError(\"Method in not allowed\",", "else: self.default_response(response) except Exception as e: if self.exception_handler is None: raise e else:", "\"Not found\" def find_handler(self, request_path): for path, handler in self.routes.items(): parse_result = parse(path,", "from jinja2 import Environment, FileSystemLoader from whitenoise import WhiteNoise from middleware import Middleware", "def test_session(self,base_url=\"http:''testserver\"): session = RequestsSession() session.mount(prefix=base_url, adapter=RequestsWSGIAdapter(self)) return session def handle_request(self, request): response", "requests import Session as RequestsSession from wsgiadapter import WSGIAdapter as RequestsWSGIAdapter import os", "= {} self.templates_env = Environment(loader=FileSystemLoader(os.path.abspath(templates_dir))) self.exception_handler = None self.whitenoise = WhiteNoise(self.wsgi_app, root=static_dir) self.static_dir", "return self.whitenoise(environ, start_response) return self.middleware(environ, start_response) def add_middleware(self, middleware_cls): self.middleware.add(middleware_cls) def route(self, path):", "self._static_root): environ[\"PATH_INFO\"] = cut_static_root(path_info, self._static_root) return self.whitenoise(environ, start_response) return self.middleware(environ, start_response) def add_middleware(self,", "else: self.exception_handler(request, response, e) return response def default_response(self, response): response.status_code = 404 response.text", "def template(self, template_name, context=None): if context is None: context = {} return self.templates_env.get_template(template_name).render(**context)", "whitenoise import WhiteNoise from middleware import Middleware from static import cut_static_root, request_for_static class", "template(self, template_name, context=None): if context is None: context = {} return self.templates_env.get_template(template_name).render(**context) def", "handler) return handler return wrapper def add_route(self, path, handler): assert path not in", "Session as RequestsSession from wsgiadapter import WSGIAdapter as RequestsWSGIAdapter import os from jinja2", "response, **kwargs) else: self.default_response(response) except Exception as e: if self.exception_handler is None: raise", "parse import inspect from requests import Session as RequestsSession from wsgiadapter import WSGIAdapter", "path, handler in self.routes.items(): parse_result = parse(path, request_path) if parse_result is not None:", "template_name, context=None): if context is None: context = {} return self.templates_env.get_template(template_name).render(**context) def add_exception_handler(self,", "self.templates_env = Environment(loader=FileSystemLoader(os.path.abspath(templates_dir))) 
self.exception_handler = None self.whitenoise = WhiteNoise(self.wsgi_app, root=static_dir) self.static_dir = os.path.abspath(static_dir)", "if inspect.isclass(handler): handler = getattr(handler(), request.method.lower(), None) if handler is None: raise AttributeError(\"Method", "def wsgi_app(self, environ, start_response): request = Request(environ) response = self.handle_request(request) return response(environ, start_response)", "response.text = \"Not found\" def find_handler(self, request_path): for path, handler in self.routes.items(): parse_result", "response(environ, start_response) def __call__(self, environ, start_response): path_info = environ[\"PATH_INFO\"] if request_for_static(path_info, self._static_root): environ[\"PATH_INFO\"]", "None: raise e else: self.exception_handler(request, response, e) return response def default_response(self, response): response.status_code", "API: def __init__(self, templates_dir=\"templates\", static_dir=\"static\"): self.routes = {} self.templates_env = Environment(loader=FileSystemLoader(os.path.abspath(templates_dir))) self.exception_handler =", "parse import parse import inspect from requests import Session as RequestsSession from wsgiadapter", "class API: def __init__(self, templates_dir=\"templates\", static_dir=\"static\"): self.routes = {} self.templates_env = Environment(loader=FileSystemLoader(os.path.abspath(templates_dir))) self.exception_handler", "self.exception_handler(request, response, e) return response def default_response(self, response): response.status_code = 404 response.text =", "None self.whitenoise = WhiteNoise(self.wsgi_app, root=static_dir) self.static_dir = os.path.abspath(static_dir) self._static_root = \"/static\" self.middleware =", "RequestsSession from wsgiadapter import WSGIAdapter as RequestsWSGIAdapter import os from jinja2 import Environment,", "except Exception as e: if self.exception_handler is None: raise e else: self.exception_handler(request, response,", "Environment(loader=FileSystemLoader(os.path.abspath(templates_dir))) self.exception_handler = None self.whitenoise = WhiteNoise(self.wsgi_app, root=static_dir) self.static_dir = os.path.abspath(static_dir) self._static_root =", "request_path): for path, handler in self.routes.items(): parse_result = parse(path, request_path) if parse_result is", "\"/static\" self.middleware = Middleware(self) def wsgi_app(self, environ, start_response): request = Request(environ) response =", "self.exception_handler = None self.whitenoise = WhiteNoise(self.wsgi_app, root=static_dir) self.static_dir = os.path.abspath(static_dir) self._static_root = \"/static\"", "webob import Request, Response from parse import parse import inspect from requests import", "adapter=RequestsWSGIAdapter(self)) return session def handle_request(self, request): response = Response() handler, kwargs = self.find_handler(request_path=request.path)", "= \"Not found\" def find_handler(self, request_path): for path, handler in self.routes.items(): parse_result =", "= self.handle_request(request) return response(environ, start_response) def __call__(self, environ, start_response): path_info = environ[\"PATH_INFO\"] if", "middleware import Middleware from static import cut_static_root, request_for_static class API: def __init__(self, templates_dir=\"templates\",", "= WhiteNoise(self.wsgi_app, root=static_dir) self.static_dir = os.path.abspath(static_dir) self._static_root = \"/static\" self.middleware = Middleware(self) def", "self.handle_request(request) return response(environ, start_response) def 
__call__(self, environ, start_response): path_info = environ[\"PATH_INFO\"] if request_for_static(path_info,", "import Request, Response from parse import parse import inspect from requests import Session", "None: return handler, parse_result.named return None, None def template(self, template_name, context=None): if context", "if context is None: context = {} return self.templates_env.get_template(template_name).render(**context) def add_exception_handler(self, exception_handler): self.exception_handler", "path): def wrapper(handler): self.add_route(path, handler) return handler return wrapper def add_route(self, path, handler):", "from static import cut_static_root, request_for_static class API: def __init__(self, templates_dir=\"templates\", static_dir=\"static\"): self.routes =", "request.method.lower(), None) if handler is None: raise AttributeError(\"Method in not allowed\", request.method) handler(request,", "= Environment(loader=FileSystemLoader(os.path.abspath(templates_dir))) self.exception_handler = None self.whitenoise = WhiteNoise(self.wsgi_app, root=static_dir) self.static_dir = os.path.abspath(static_dir) self._static_root", "{} self.templates_env = Environment(loader=FileSystemLoader(os.path.abspath(templates_dir))) self.exception_handler = None self.whitenoise = WhiteNoise(self.wsgi_app, root=static_dir) self.static_dir =", "exists.\" self.routes[path] = handler def test_session(self,base_url=\"http:''testserver\"): session = RequestsSession() session.mount(prefix=base_url, adapter=RequestsWSGIAdapter(self)) return session", "return response def default_response(self, response): response.status_code = 404 response.text = \"Not found\" def", "Request(environ) response = self.handle_request(request) return response(environ, start_response) def __call__(self, environ, start_response): path_info =", "Exception as e: if self.exception_handler is None: raise e else: self.exception_handler(request, response, e)", "response = self.handle_request(request) return response(environ, start_response) def __call__(self, environ, start_response): path_info = environ[\"PATH_INFO\"]", "start_response): path_info = environ[\"PATH_INFO\"] if request_for_static(path_info, self._static_root): environ[\"PATH_INFO\"] = cut_static_root(path_info, self._static_root) return self.whitenoise(environ,", "request): response = Response() handler, kwargs = self.find_handler(request_path=request.path) try: if handler is not", "if self.exception_handler is None: raise e else: self.exception_handler(request, response, e) return response def", "def add_route(self, path, handler): assert path not in self.routes, f\"{path} already exists.\" self.routes[path]", "self.whitenoise(environ, start_response) return self.middleware(environ, start_response) def add_middleware(self, middleware_cls): self.middleware.add(middleware_cls) def route(self, path): def", "self._static_root = \"/static\" self.middleware = Middleware(self) def wsgi_app(self, environ, start_response): request = Request(environ)", "self.middleware.add(middleware_cls) def route(self, path): def wrapper(handler): self.add_route(path, handler) return handler return wrapper def", "import Middleware from static import cut_static_root, request_for_static class API: def __init__(self, templates_dir=\"templates\", static_dir=\"static\"):", "try: if handler is not None: if inspect.isclass(handler): handler = getattr(handler(), request.method.lower(), None)", "wrapper def add_route(self, path, handler): assert path not in self.routes, f\"{path} already 
exists.\"", "jinja2 import Environment, FileSystemLoader from whitenoise import WhiteNoise from middleware import Middleware from", "find_handler(self, request_path): for path, handler in self.routes.items(): parse_result = parse(path, request_path) if parse_result", "not None: if inspect.isclass(handler): handler = getattr(handler(), request.method.lower(), None) if handler is None:", "None def template(self, template_name, context=None): if context is None: context = {} return", "Response() handler, kwargs = self.find_handler(request_path=request.path) try: if handler is not None: if inspect.isclass(handler):", "add_middleware(self, middleware_cls): self.middleware.add(middleware_cls) def route(self, path): def wrapper(handler): self.add_route(path, handler) return handler return", "os.path.abspath(static_dir) self._static_root = \"/static\" self.middleware = Middleware(self) def wsgi_app(self, environ, start_response): request =", "import inspect from requests import Session as RequestsSession from wsgiadapter import WSGIAdapter as", "root=static_dir) self.static_dir = os.path.abspath(static_dir) self._static_root = \"/static\" self.middleware = Middleware(self) def wsgi_app(self, environ,", "response = Response() handler, kwargs = self.find_handler(request_path=request.path) try: if handler is not None:", "= Response() handler, kwargs = self.find_handler(request_path=request.path) try: if handler is not None: if", "start_response) def add_middleware(self, middleware_cls): self.middleware.add(middleware_cls) def route(self, path): def wrapper(handler): self.add_route(path, handler) return", "from whitenoise import WhiteNoise from middleware import Middleware from static import cut_static_root, request_for_static", "cut_static_root(path_info, self._static_root) return self.whitenoise(environ, start_response) return self.middleware(environ, start_response) def add_middleware(self, middleware_cls): self.middleware.add(middleware_cls) def", "inspect from requests import Session as RequestsSession from wsgiadapter import WSGIAdapter as RequestsWSGIAdapter", "handler(request, response, **kwargs) else: self.default_response(response) except Exception as e: if self.exception_handler is None:", "as e: if self.exception_handler is None: raise e else: self.exception_handler(request, response, e) return", "def __init__(self, templates_dir=\"templates\", static_dir=\"static\"): self.routes = {} self.templates_env = Environment(loader=FileSystemLoader(os.path.abspath(templates_dir))) self.exception_handler = None", "handler is None: raise AttributeError(\"Method in not allowed\", request.method) handler(request, response, **kwargs) else:", "e) return response def default_response(self, response): response.status_code = 404 response.text = \"Not found\"", "None: raise AttributeError(\"Method in not allowed\", request.method) handler(request, response, **kwargs) else: self.default_response(response) except", "raise AttributeError(\"Method in not allowed\", request.method) handler(request, response, **kwargs) else: self.default_response(response) except Exception", "def add_middleware(self, middleware_cls): self.middleware.add(middleware_cls) def route(self, path): def wrapper(handler): self.add_route(path, handler) return handler", "None) if handler is None: raise AttributeError(\"Method in not allowed\", request.method) handler(request, response,", "cut_static_root, request_for_static class API: def __init__(self, templates_dir=\"templates\", static_dir=\"static\"): self.routes = {} self.templates_env 
=", "request_for_static(path_info, self._static_root): environ[\"PATH_INFO\"] = cut_static_root(path_info, self._static_root) return self.whitenoise(environ, start_response) return self.middleware(environ, start_response) def", "self._static_root) return self.whitenoise(environ, start_response) return self.middleware(environ, start_response) def add_middleware(self, middleware_cls): self.middleware.add(middleware_cls) def route(self,", "already exists.\" self.routes[path] = handler def test_session(self,base_url=\"http:''testserver\"): session = RequestsSession() session.mount(prefix=base_url, adapter=RequestsWSGIAdapter(self)) return", "parse_result is not None: return handler, parse_result.named return None, None def template(self, template_name,", "not in self.routes, f\"{path} already exists.\" self.routes[path] = handler def test_session(self,base_url=\"http:''testserver\"): session =", "self.routes = {} self.templates_env = Environment(loader=FileSystemLoader(os.path.abspath(templates_dir))) self.exception_handler = None self.whitenoise = WhiteNoise(self.wsgi_app, root=static_dir)", "self.routes.items(): parse_result = parse(path, request_path) if parse_result is not None: return handler, parse_result.named", "environ, start_response): request = Request(environ) response = self.handle_request(request) return response(environ, start_response) def __call__(self,", "Request, Response from parse import parse import inspect from requests import Session as", "templates_dir=\"templates\", static_dir=\"static\"): self.routes = {} self.templates_env = Environment(loader=FileSystemLoader(os.path.abspath(templates_dir))) self.exception_handler = None self.whitenoise =", "from wsgiadapter import WSGIAdapter as RequestsWSGIAdapter import os from jinja2 import Environment, FileSystemLoader", "static import cut_static_root, request_for_static class API: def __init__(self, templates_dir=\"templates\", static_dir=\"static\"): self.routes = {}", "def wrapper(handler): self.add_route(path, handler) return handler return wrapper def add_route(self, path, handler): assert", "self.middleware(environ, start_response) def add_middleware(self, middleware_cls): self.middleware.add(middleware_cls) def route(self, path): def wrapper(handler): self.add_route(path, handler)", "self.default_response(response) except Exception as e: if self.exception_handler is None: raise e else: self.exception_handler(request,", "**kwargs) else: self.default_response(response) except Exception as e: if self.exception_handler is None: raise e", "self.routes, f\"{path} already exists.\" self.routes[path] = handler def test_session(self,base_url=\"http:''testserver\"): session = RequestsSession() session.mount(prefix=base_url," ]
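A short usage sketch for the API class above. It is illustrative only: the module name api, the app variable, the route paths and the handler names are assumptions, and it presumes the project's middleware and static helper modules are importable alongside the class.

# app.py (hypothetical): register routes and exercise them with test_session().
from api import API

app = API(templates_dir="templates", static_dir="static")

@app.route("/")
def index(request, response):
    # Handlers mutate the webob Response created in handle_request().
    response.text = "Hello from the home page"

@app.route("/hello/{name}")
def greet(request, response, name):
    # {name} is captured by parse() in find_handler() and passed as a kwarg.
    response.text = f"Hello, {name}"

@app.route("/about")
class About:
    def get(self, request, response):
        response.text = "About page (class-based handler, GET only)"

if __name__ == "__main__":
    # The test client routes requests through the WSGI app without a socket.
    client = app.test_session()
    assert client.get("http://testserver/hello/world").text == "Hello, world"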
[ "class LogSourcesTestCase(TestCase): def setUp(self): LogSources.objects.create( Description=\"Firewall of gnu/linux kernel\", Type=\"Iptables\", Model=\"iptables v1.4.21\", Active=1,", "setUp(self): LogSources.objects.create( Description=\"Firewall of gnu/linux kernel\", Type=\"Iptables\", Model=\"iptables v1.4.21\", Active=1, Software_Class=\"Firewall\", Path=\"iptables\", )", "LogSourcesTestCase(TestCase): def setUp(self): LogSources.objects.create( Description=\"Firewall of gnu/linux kernel\", Type=\"Iptables\", Model=\"iptables v1.4.21\", Active=1, Software_Class=\"Firewall\",", "def setUp(self): LogSources.objects.create( Description=\"Firewall of gnu/linux kernel\", Type=\"Iptables\", Model=\"iptables v1.4.21\", Active=1, Software_Class=\"Firewall\", Path=\"iptables\"," ]
[ "# second_dot_index, third_dot_index = -1, -1 # dot indices if second_dot_index == -1:", "ident_frame = ttk.Labelframe(developer_tab, text=\"Identification\") key_label = ttk.Label(key_frame, text=\"Key:\") KEY = tk.StringVar() # cryptographic", "\"xxx\" and trigger_type == \"focusin\": # clears the hint serial_entry.delete(0, tk.END) return tk.TRUE", "left_frame.pack(expand=tk.TRUE, side=tk.LEFT, fill=tk.BOTH) right_frame.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.BOTH) serial_frame = ttk.Frame(left_frame) serial_label = ttk.Label(serial_frame, text=\"Serial", "tk.BooleanVar() set_autoincrement_button = ttk.Checkbutton( right_frame, text=\"Auto increment\", width=30, variable=AUTOINCR) set_ident_button = ttk.Button( right_frame,", "log_frame.pack_forget() separator.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.X, padx=5) developer_tab.pack_forget() log_frame.pack(expand=tk.TRUE, side=tk.BOTTOM, fill=tk.BOTH) DEV_STATE = False if", "pady=0) # end of firmware tab # developer tab: developer_tab = ttk.Frame(main_win) key_frame", "if validation commands return False.\"\"\" instance = main_win.nametowidget(widget_name) # getting certain entry instance.delete(0,", "\"%V\"), invalidcommand=(ivcmd, \"%W\", \"%P\"), validate=\"all\") version_entry.config(validatecommand=(vcmd, \"%W\", \"%P\", \"%V\"), invalidcommand=(ivcmd, \"%W\", \"%P\"), validate=\"all\")", "fill=tk.X, padx=15) set_key_button.pack(side=tk.RIGHT, padx=4) key_browse_button.pack(side=tk.RIGHT) serial_label.pack(expand=tk.FALSE, side=tk.LEFT) serial_entry.pack(side=tk.LEFT, padx=14) version_label.pack(side=tk.LEFT) version_entry.pack(side=tk.LEFT, padx=26) set_ident_button.pack(side=tk.TOP)", "global FIRMWARE global FIRM_PATH global RUNNING global UPDATE_LOCK global UPDATE_RUNNING global URL while", "major.isdigit(): log.insert(tk.END, \"MAJOR should be a number!\\n\") return tk.FALSE if not minor.isdigit(): if", "AUTOINCR = tk.BooleanVar() set_autoincrement_button = ttk.Checkbutton( right_frame, text=\"Auto increment\", width=30, variable=AUTOINCR) set_ident_button =", "log.insert(tk.END, error_text) return UPDATE_LOCK.release() def set_buttons_to_state(state): \"\"\"Function sets the given state to all", "fill=tk.BOTH) DEV_STATE = True if sys.platform.startswith(\"win\"): height += 530 - 383 elif sys.platform.startswith(\"linux\"):", "port:\") URL = tk.StringVar() # URL of port combox = ttk.Combobox(com_frame, postcommand=_update_combox, width=15,", "def set_buttons_to_state(state): \"\"\"Function sets the given state to all buttons. 
:param: given state.\"\"\"", "error_text) return # The statement below is necessary to work with url as", "clears the hint serial_entry.delete(0, tk.END) return tk.TRUE if content == \"xxx\" and trigger_type", "if _serial_validation(content, trigger_type): return tk.TRUE if instance is version_entry: if _version_validation(content, trigger_type): return", "the entry return tk.TRUE if content == \"\": serial_entry.config(font=(\"Calibri Italic\", 10), foreground=\"grey\") if", "number incremented.\") else: return def _serial_validation(content, trigger_type): if content == \"xxx\" and trigger_type", "= tk.StringVar() # URL of port combox = ttk.Combobox(com_frame, postcommand=_update_combox, width=15, textvariable=URL) combox.bind(\"<<ComboboxSelected>>\",", "ntpath import sys import threading import tkinter as tk from tkinter import filedialog,", "hint serial_entry.delete(0, tk.END) return tk.TRUE if content == \"xxx\" and trigger_type == \"focusout\":", "-1: # patch = \"\" # minor = \"\" # # major correction", "== \"focusin\": # clears the hint serial_entry.delete(0, tk.END) return tk.TRUE if content ==", "side=tk.TOP, fill=tk.BOTH) log_frame.pack(expand=tk.TRUE, side=tk.BOTTOM, fill=tk.BOTH) DEV_STATE = True if sys.platform.startswith(\"win\"): height += 530", "UPDATE_RUNNING if UPDATE_RUNNING: messagebox.showinfo(\"Information\", \"You need to wait for the update to complete\")", "commands return False.\"\"\" instance = main_win.nametowidget(widget_name) # getting certain entry instance.delete(0, tk.END) instance.insert(tk.END,", "file (*.txt) and load it to controller (developer only) * update serial and", "* browse key file (*.txt) and load it to controller (developer only) *", "key \", command=set_key) key_browse_button = ttk.Button(key_frame, text=\"Browse...\", command=key_browse) left_frame = ttk.Frame(ident_frame) right_frame =", "# Setting validation to entries (serial_entry and version_entry) vcmd = main_win.register(validation_command) ivcmd =", "{}\\n\".format(URL.get(), serial_entry.get(), version_entry.get())) log.insert(tk.END, \"Please wait\\n\") main_win.update() res = epcbootlib.urpc_write_ident(url, key, int(serial_entry.get()), version)", "\"\"\" import argparse import ctypes import ntpath import sys import threading import tkinter", "ttk.Label(com_frame, font=(\"Calibri Italic\", 10)) underlined_font = font.Font(com_hint, com_hint.cget(\"font\")) underlined_font.configure(underline=True) com_hint.configure(font=underlined_font) tip_com_hin = ToolTip(com_hint)", "comport in serial.tools.list_ports.comports()]) def clean_log(): \"\"\"Cleans log.\"\"\" log.delete('1.0', tk.END) def browse_firmware(): \"\"\"Function opens", "if instance is serial_entry: if _serial_validation(content, trigger_type): return tk.TRUE if instance is version_entry:", "not isinstance(main_win.firmware, type(None)): # File was opened FIRM_PATH.set(main_win.firmware.name) FIRMWARE = main_win.firmware.read() upd_button.config(state=tk.NORMAL) if", "if first_dot_index == -1: # second_dot_index, third_dot_index = -1, -1 # dot indices", "= \"\" # # major correction major = content[second_dot_index + 1:] if not", "0) second_dot_index = content.find(\".\", first_dot_index + 1) third_dot_index = content.find(\".\", second_dot_index + 1)", "log.insert(tk.END, \"You must specify device URL.\\n\") return if KEY.get() == \"\": log.insert(tk.END, \"You", "return False.\"\"\" instance = main_win.nametowidget(widget_name) # getting certain entry instance.delete(0, tk.END) instance.insert(tk.END, content)", 
"\"\" # minor = \"\" # # major correction major = content[second_dot_index +", "Creating main window main_win = tk.Tk() # Setting window geometry and title if", "= ttk.Labelframe(firmware_tab, text=\"Firmware update\") firmware_label = ttk.Label(firmware_frame, text=\"Firmware:\") FIRM_PATH = tk.StringVar() # path", "file\", \"*.cod\"), (\"All files\", \"*.*\"))) if not isinstance(main_win.firmware, type(None)): # File was opened", "key.\\n\") return error_text = urlparse.validate(URL.get()) if error_text: log.insert(tk.END, error_text) return # The statement", "tk.FALSE if patch != \"\" and not patch.isdigit(): log.insert(tk.END, \"PATCH should be a", "minor = content[first_dot_index + 1:second_dot_index] patch = content[second_dot_index + 1:] if second_dot_index ==", "an infinite loop in the update stream.\"\"\" global RUNNING global UPDATE_LOCK global UPDATE_RUNNING", "text=\"Browse...\", width=10, command=browse_firmware) upd_button = ttk.Button(firmware_tab, text=\"Update firmware\", state=tk.DISABLED, width=20, command=start_update) com_frame.pack(side=tk.TOP, fill=tk.X,", "= ttk.Labelframe(developer_tab, text=\"Identification\") key_label = ttk.Label(key_frame, text=\"Key:\") KEY = tk.StringVar() # cryptographic key", "firmware_entry = ttk.Entry(firmware_frame, textvariable=FIRM_PATH, width=17) firmware_browse_button = ttk.Button(firmware_frame, text=\"Browse...\", width=10, command=browse_firmware) upd_button =", "\"\"\"Sets serial number, hardware version and key.\"\"\" global URL global KEY if URL.get()", "serial_entry.insert(tk.END, \"xxx\") return tk.TRUE if not content.isdigit(): log.insert(tk.END, \"Serial number must be a", "font=(\"Calibri Italic\", 10)) serial_entry.insert(tk.END, \"xxx\") version_frame = ttk.Frame(left_frame) version_label = ttk.Label(version_frame, text=\"HW version:\")", "help=\"Choosing method: dev or cust \" \"(developer or customer)\") args = parser.parse_args() #", "serial.Serial(port=URL.get()) test_port.close() except serial.SerialException: log.insert( tk.END, \"Something is wrong! 
If you use Linux,", "AUTOINCR.get(): serial_number = int(serial_entry.get()) serial_entry.delete(0, tk.END) serial_entry.insert(0, str(serial_number + 1)) log.insert(tk.END, \"Serial number", "error_text) return # Checking serial and version format if not serial_entry.validate() or not", "trigger_type == \"focusin\": # clears the hint version_entry.delete(0, tk.END) return tk.TRUE if content", "height += 530 - 383 elif sys.platform.startswith(\"linux\"): height += (568 - 412) main_win.geometry(f\"{width}x{height}\")", "== -1: # correction third_dot_index = -1 # if third_dot_index != -1: #", "Consider -1 as dot absence if first_dot_index == -1: # second_dot_index, third_dot_index =", "0: log.insert(tk.END, \"Ok\\n\") else: log.insert(tk.END, \"Fail\\n\") def ident_and_key_set(): \"\"\"Sets serial number, hardware version", "epcbootlib.urpc_write_ident(url, key, int(serial_entry.get()), version) if res == 0: log.insert(tk.END, \"Ok\\n\") _autoincrement_serial() else: log.insert(tk.END,", "must specify key.\\n\") return error_text = urlparse.validate(URL.get()) if error_text: log.insert(tk.END, error_text) return #", "main_win.register(validation_command) ivcmd = main_win.register(invalid_command) serial_entry.config(validatecommand=(vcmd, \"%W\", \"%P\", \"%V\"), invalidcommand=(ivcmd, \"%W\", \"%P\"), validate=\"all\") version_entry.config(validatecommand=(vcmd,", "FIRMWARE, len(FIRMWARE)) if res == 0: log.insert(tk.END, \"Ok\\n\") else: log.insert(tk.END, \"Fail\\n\") set_buttons_to_state(tk.NORMAL) UPDATE_RUNNING", "error_text = urlparse.validate(URL.get()) if error_text: log.insert(tk.END, error_text) return # Checking serial and version", "if error_text: log.insert(tk.END, error_text) return # The statement below is necessary to work", "log.delete('1.0', tk.END) def browse_firmware(): \"\"\"Function opens file dialog. We are going to read", "import ntpath import sys import threading import tkinter as tk from tkinter import", "ttk.Entry(key_frame, textvariable=KEY) set_key_button = ttk.Button(key_frame, text=\" Set key \", command=set_key) key_browse_button = ttk.Button(key_frame,", "side=tk.TOP, fill=tk.X, anchor=tk.N) key_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=4) ident_frame.pack(expand=tk.FALSE, side=tk.TOP, padx=5, fill=tk.BOTH, pady=3,", "state to all buttons. 
:param: given state.\"\"\" firmware_browse_button.config(state=state) upd_button.config(state=state) collapse_button.config(state=state) key_browse_button.config(state=state) set_key_button.config(state=state) set_ident_button.config(state=state)", "== \"xxx\": log.insert(tk.END, \"You must specify serial number.\\n\") return if version_entry.get() == \"x.x.x\":", "\"%W\", \"%P\", \"%V\"), invalidcommand=(ivcmd, \"%W\", \"%P\"), validate=\"all\") version_entry.config(validatecommand=(vcmd, \"%W\", \"%P\", \"%V\"), invalidcommand=(ivcmd, \"%W\",", "Italic\", 10), foreground=\"grey\") if trigger_type == \"focusout\": serial_entry.delete(0, tk.END) serial_entry.insert(tk.END, \"xxx\") return tk.TRUE", "ttk.Label(version_frame, text=\"HW version:\") version_entry = ttk.Entry(version_frame, foreground=\"grey\", font=(\"Calibri Italic\", 10)) version_entry.insert(tk.END, \"x.x.x\") AUTOINCR", "\"MINOR should be a number!\\n\") return tk.FALSE if patch != \"\" and not", "{Version.full}\") FIRMWARE = \"\" # string containing firmware # firmware tab: firmware_tab =", ".txt file.\"\"\" global KEY main_win.key_file = filedialog.askopenfile( mode=\"r\", initialdir=\"/\", title=\"Select key\", filetypes=((\"Text files\",", "from tip import ToolTip from version import Version parser = argparse.ArgumentParser() parser.add_argument(\"-m\", \"--method\",", "anchor=tk.N) collapse_frame.pack(expand=tk.FALSE, side=tk.TOP, fill=tk.X, anchor=tk.N) key_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=4) ident_frame.pack(expand=tk.FALSE, side=tk.TOP, padx=5,", "+ 1) # Consider -1 as dot absence if first_dot_index == -1: #", "window geometry and title if sys.platform.startswith(\"win\"): main_win.geometry(\"500x383\") elif sys.platform.startswith(\"linux\"): main_win.geometry(\"640x412\") else: print(\"Unknown system!\")", "validate=\"all\") version_entry.config(validatecommand=(vcmd, \"%W\", \"%P\", \"%V\"), invalidcommand=(ivcmd, \"%W\", \"%P\"), validate=\"all\") # end of developer", "global AUTOINCR if AUTOINCR.get(): serial_number = int(serial_entry.get()) serial_entry.delete(0, tk.END) serial_entry.insert(0, str(serial_number + 1))", "state collapse_frame = ttk.Frame(main_win) collapse_button = ttk.Button(collapse_frame, text=\"Developer mode\", command=collapse) separator = ttk.Separator(collapse_frame,", "res == 0: log.insert(tk.END, \"Ok\\n\") _autoincrement_serial() else: log.insert(tk.END, \"Fail\\n\") def _autoincrement_serial(): global AUTOINCR", "browse key file (*.txt) and load it to controller (developer only) * update", "412) main_win.geometry(f\"{width}x{height}\") def on_modification(event=None): log.see(tk.END) log.edit_modified(0) def close_window(): \"\"\"This function breaks an infinite", "and load it to controller * browse key file (*.txt) and load it", "\"You must specify key.\\n\") return if serial_entry.get() == \"xxx\": log.insert(tk.END, \"You must specify", "content == \"\": serial_entry.config(font=(\"Calibri Italic\", 10), foreground=\"grey\") if trigger_type == \"focusout\": serial_entry.delete(0, tk.END)", "text=\"Set serial and hardware version\", width=30, command=ident_and_key_set) set_autoincrement_button.pack(expand=tk.TRUE, side=tk.TOP) set_ident_button.pack(expand=tk.TRUE, side=tk.BOTTOM) # Setting", "return tk.FALSE if patch != \"\" and not patch.isdigit(): log.insert(tk.END, \"PATCH should be", "text=\"COM settings\") com_label = ttk.Label(com_frame, text=\"COM port:\") URL = tk.StringVar() # URL of", "ttk.Frame(left_frame) version_label = ttk.Label(version_frame, 
text=\"HW version:\") version_entry = ttk.Entry(version_frame, foreground=\"grey\", font=(\"Calibri Italic\", 10))", "padx=5) # end of collapse button firmware_tab.pack(expand=tk.FALSE, side=tk.TOP, fill=tk.X, anchor=tk.N) collapse_frame.pack(expand=tk.FALSE, side=tk.TOP, fill=tk.X,", "return tk.TRUE log.insert(tk.END, \"MINOR should be a number!\\n\") return tk.FALSE if patch !=", "version_entry.validate(): return # The statement below is necessary to work with url as", "ctypes.create_string_buffer(version_entry.get().encode()) log.insert( tk.END, \"Starting identificator and key setting. Port: {}\\n Serial number: \"", "sys.platform.startswith(\"win\"): com_hint.config(text=\"Input format\", foreground=\"grey\") tip_com_hin.set_text(r\"com:\\\\.\\COMx\") elif sys.platform.startswith(\"linux\"): com_hint.config(text=\"Input format\", foreground=\"grey\") tip_com_hin.set_text(\"com:///dev/ttyUSBx\\ncom:///dev/ttyACMx\\n\" \"com:///dev/ttySx\") firmware_frame", "device is not\" \" used by another program.\\n\") if sys.platform.startswith(\"win\"): URL.set(r\"com:\\\\.\\{}\".format(URL.get())) elif sys.platform.startswith(\"linux\"):", "KEY.set(main_win.key_file.read().rstrip()) main_win.key_file.close() def set_key(): \"\"\"Function sets cryptographic key.\"\"\" global KEY global URL if", "specify key.\\n\") return if serial_entry.get() == \"xxx\": log.insert(tk.END, \"You must specify serial number.\\n\")", "if content != \"x.x.x\" and trigger_type == \"focusin\": # just enter the entry", "tk.TRUE def _version_validation(content, trigger_type=\"focusout\"): \"\"\"Returns tk.TRUE if version format is correct\"\"\" if content", "DEV_STATE height = main_win.winfo_height() width = main_win.winfo_width() if DEV_STATE: log_frame.pack_forget() separator.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.X,", "validation to entries (serial_entry and version_entry) vcmd = main_win.register(validation_command) ivcmd = main_win.register(invalid_command) serial_entry.config(validatecommand=(vcmd,", "if not isinstance(main_win.firmware, type(None)): # File was opened FIRM_PATH.set(main_win.firmware.name) FIRMWARE = main_win.firmware.read() upd_button.config(state=tk.NORMAL)", "third_dot_index != -1: # there cannot be three or more dots return tk.FALSE", "file dialog. We are going to read binary files (.cod). 
So .encode() isn't", "-1: # there cannot be three or more dots return tk.FALSE major =", "wait\\n\") main_win.update() res = epcbootlib.urpc_write_key(url, key) if res == 0: log.insert(tk.END, \"Ok\\n\") else:", "= ttk.Frame(ident_frame) right_frame = ttk.Frame(ident_frame) left_frame.pack(expand=tk.TRUE, side=tk.LEFT, fill=tk.BOTH) right_frame.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.BOTH) serial_frame =", "tk.TRUE return tk.FALSE def invalid_command(widget_name, content): \"\"\"Starts if validation commands return False.\"\"\" instance", "instance.config(foreground=\"red\") def collapse(): \"\"\"Function collapses or expands developer tab\"\"\" global DEV_STATE height =", "= threading.Lock() UPDATE_LOCK.acquire() UPDATE_RUNNING = False thread_upd = threading.Thread(target=update_firmware) thread_upd.start() main_win.protocol(\"WM_DELETE_WINDOW\", close_window) tk.mainloop()", "\"You must specify device URL.\\n\") return if KEY.get() == \"\": log.insert(tk.END, \"You must", "ttk.Button(firmware_frame, text=\"Browse...\", width=10, command=browse_firmware) upd_button = ttk.Button(firmware_tab, text=\"Update firmware\", state=tk.DISABLED, width=20, command=start_update) com_frame.pack(side=tk.TOP,", "a thread to update firmware RUNNING = True UPDATE_LOCK = threading.Lock() UPDATE_LOCK.acquire() UPDATE_RUNNING", "not serial_entry.validate() or not version_entry.validate(): return # The statement below is necessary to", "in the update stream.\"\"\" global RUNNING global UPDATE_LOCK global UPDATE_RUNNING if UPDATE_RUNNING: messagebox.showinfo(\"Information\",", "and version (developer only) \"\"\" import argparse import ctypes import ntpath import sys", "\"cust\"), help=\"Choosing method: dev or cust \" \"(developer or customer)\") args = parser.parse_args()", "Port: {}\\n Serial number: \" \"{}\\n Hardware version: {}\\n\".format(URL.get(), serial_entry.get(), version_entry.get())) log.insert(tk.END, \"Please", "== \"xxx\" and trigger_type == \"focusout\": # just leave the entry return tk.TRUE", "= ttk.Entry(version_frame, foreground=\"grey\", font=(\"Calibri Italic\", 10)) version_entry.insert(tk.END, \"x.x.x\") AUTOINCR = tk.BooleanVar() set_autoincrement_button =", "key setting. 
Port: {}\\n Serial number: \" \"{}\\n Hardware version: {}\\n\".format(URL.get(), serial_entry.get(), version_entry.get()))", "(*.txt) and load it to controller (developer only) * update serial and version", "URL global KEY if URL.get() == \"\": log.insert(tk.END, \"You must specify device URL.\\n\")", "second_dot_index + 1) # Consider -1 as dot absence if first_dot_index == -1:", "if URL.get() == \"\": combox.focus() else: upd_button.focus() def start_update(): \"\"\"Function starts the firmware", "padx=5) developer_tab.pack_forget() log_frame.pack(expand=tk.TRUE, side=tk.BOTTOM, fill=tk.BOTH) DEV_STATE = False if sys.platform.startswith(\"win\"): height -= (530", "== 0: log.insert(tk.END, \"Ok\\n\") _autoincrement_serial() else: log.insert(tk.END, \"Fail\\n\") def _autoincrement_serial(): global AUTOINCR if", "first_dot_index = content.find(\".\", 0) second_dot_index = content.find(\".\", first_dot_index + 1) third_dot_index = content.find(\".\",", "update to complete\") return UPDATE_LOCK.release() RUNNING = False main_win.destroy() # Creating main window", "return tk.FALSE version_entry.config(font=(\"Calibri\", 10), foreground=\"green\") return tk.TRUE def validation_command(widget_name, content, trigger_type): \"\"\"Checks entry", "tk.StringVar() # path to firmware firmware_entry = ttk.Entry(firmware_frame, textvariable=FIRM_PATH, width=17) firmware_browse_button = ttk.Button(firmware_frame,", "url = ctypes.create_string_buffer(URL.get().encode()) key = ctypes.create_string_buffer(KEY.get().encode()) version = ctypes.create_string_buffer(version_entry.get().encode()) log.insert( tk.END, \"Starting identificator", "PC and load it to controller * browse key file (*.txt) and load", "work with url as C char* url = ctypes.create_string_buffer(URL.get().encode()) key = ctypes.create_string_buffer(KEY.get().encode()) version", "= scrolledtext.ScrolledText(log_frame, height=8, wrap=tk.WORD) log.edit_modified(0) log_button_frame = ttk.LabelFrame(log_frame) log_button = ttk.Button(log_button_frame, text=\"Clean log\",", "error_text) return UPDATE_LOCK.release() def set_buttons_to_state(state): \"\"\"Function sets the given state to all buttons.", "= ttk.Entry(key_frame, textvariable=KEY) set_key_button = ttk.Button(key_frame, text=\" Set key \", command=set_key) key_browse_button =", "(\"All files\", \"*.*\"))) if not isinstance(main_win.firmware, type(None)): # File was opened FIRM_PATH.set(main_win.firmware.name) FIRMWARE", "it to controller * browse key file (*.txt) and load it to controller", "(minor == \"\" and second_dot_index == -1 and trigger_type != \"focusout\"): version_entry.config(font=(\"Calibri\", 10),", "clean_log(): \"\"\"Cleans log.\"\"\" log.delete('1.0', tk.END) def browse_firmware(): \"\"\"Function opens file dialog. We are", "hint version_entry.delete(0, tk.END) return tk.TRUE if content != \"x.x.x\" and trigger_type == \"focusin\":", "serial.SerialException: log.insert( tk.END, \"Something is wrong! 
If you use Linux, open epcboot_gui with", "end of collapse button firmware_tab.pack(expand=tk.FALSE, side=tk.TOP, fill=tk.X, anchor=tk.N) collapse_frame.pack(expand=tk.FALSE, side=tk.TOP, fill=tk.X, anchor=tk.N) key_frame.pack(side=tk.TOP,", "FIRMWARE global FIRM_PATH global RUNNING global UPDATE_LOCK global UPDATE_RUNNING global URL while RUNNING:", "KEY = tk.StringVar() # cryptographic key key_entry = ttk.Entry(key_frame, textvariable=KEY) set_key_button = ttk.Button(key_frame,", "foreground=\"green\") return tk.TRUE log.insert(tk.END, \"MINOR should be a number!\\n\") return tk.FALSE if patch", "file.\"\"\" global KEY main_win.key_file = filedialog.askopenfile( mode=\"r\", initialdir=\"/\", title=\"Select key\", filetypes=((\"Text files\", \"*.txt\"),", "not version_entry.validate(): return # The statement below is necessary to work with url", "third_dot_index = -1, -1 # dot indices if second_dot_index == -1: # correction", "if third_dot_index != -1: # there cannot be three or more dots return", "content, trigger_type): \"\"\"Checks entry format and changes font If format is ok: green", "version format if not serial_entry.validate() or not version_entry.validate(): return # The statement below", "if not minor.isdigit(): if (minor == \"\" and second_dot_index == -1 and trigger_type", "mode\", command=collapse) separator = ttk.Separator(collapse_frame, orient=\"horizontal\") collapse_button.pack(side=tk.LEFT, anchor=tk.NW, padx=5) separator.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.X, padx=5)", "str(serial_number + 1)) log.insert(tk.END, \"Serial number incremented.\") else: return def _serial_validation(content, trigger_type): if", "com_label.pack(side=tk.LEFT) combox.pack(side=tk.LEFT, padx=10) com_hint.pack(side=tk.LEFT) firmware_label.pack(side=tk.LEFT) firmware_entry.pack(expand=tk.TRUE, side=tk.LEFT, fill=tk.X, padx=14) firmware_browse_button.pack(side=tk.LEFT, padx=5) upd_button.pack(side=tk.TOP, pady=0)", "mode=\"r\", initialdir=\"/\", title=\"Select key\", filetypes=((\"Text files\", \"*.txt\"), (\"All files\", \"*.*\"))) if not isinstance(main_win.key_file,", "side=tk.BOTTOM, fill=tk.BOTH) DEV_STATE = True if sys.platform.startswith(\"win\"): height += 530 - 383 elif", "UPDATE_LOCK = threading.Lock() UPDATE_LOCK.acquire() UPDATE_RUNNING = False thread_upd = threading.Thread(target=update_firmware) thread_upd.start() main_win.protocol(\"WM_DELETE_WINDOW\", close_window)", "serial and version (developer only) \"\"\" import argparse import ctypes import ntpath import", "# creating collapse button DEV_STATE = False # developer tab state collapse_frame =", "UPDATE_RUNNING = False def key_browse(): \"\"\"Opens file dialog. 
Key must be .txt file.\"\"\"", "fill=tk.BOTH) right_frame.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.BOTH) serial_frame = ttk.Frame(left_frame) serial_label = ttk.Label(serial_frame, text=\"Serial number:\") serial_entry", "elif sys.platform.startswith(\"linux\"): URL.set(r\"com://{}\".format(URL.get())) if upd_button.state() == (): # in case of enabled upd_button,", "be a number!\\n\") return tk.FALSE serial_entry.config(font=(\"Calibri\", 10), foreground=\"green\") return tk.TRUE def _version_validation(content, trigger_type=\"focusout\"):", "# correction third_dot_index = -1 # if third_dot_index != -1: # there cannot", "instance = main_win.nametowidget(widget_name) # getting certain entry instance.delete(0, tk.END) instance.insert(tk.END, content) instance.config(foreground=\"red\") def", "number!\\n\") return tk.FALSE serial_entry.config(font=(\"Calibri\", 10), foreground=\"green\") return tk.TRUE def _version_validation(content, trigger_type=\"focusout\"): \"\"\"Returns tk.TRUE", "global URL global KEY if URL.get() == \"\": log.insert(tk.END, \"You must specify device", "= content[first_dot_index + 1:second_dot_index] patch = content[second_dot_index + 1:] if second_dot_index == -1:", "# patch = \"\" # minor = \"\" # # major correction major", "global URL while RUNNING: UPDATE_LOCK.acquire() if not RUNNING: break UPDATE_RUNNING = True #", "fill=tk.BOTH) DEV_STATE = False if sys.platform.startswith(\"win\"): height -= (530 - 383) elif sys.platform.startswith(\"linux\"):", "log_frame log_frame = ttk.Labelframe(main_win, text=\"Log\") log = scrolledtext.ScrolledText(log_frame, height=8, wrap=tk.WORD) log.edit_modified(0) log_button_frame =", "= ttk.Entry(firmware_frame, textvariable=FIRM_PATH, width=17) firmware_browse_button = ttk.Button(firmware_frame, text=\"Browse...\", width=10, command=browse_firmware) upd_button = ttk.Button(firmware_tab,", "= \"\" # # minor correction minor = content[first_dot_index + 1:] if first_dot_index", "log.insert(tk.END, \"Starting key setting. Port: {}\\n\".format(URL.get())) log.insert(tk.END, \"Please wait\\n\") main_win.update() res = epcbootlib.urpc_write_key(url,", "return tk.TRUE if instance is version_entry: if _version_validation(content, trigger_type): return tk.TRUE return tk.FALSE", "\"\"\"Updates firmware.\"\"\" global FIRMWARE global FIRM_PATH global RUNNING global UPDATE_LOCK global UPDATE_RUNNING global", "\"x.x.x\" and trigger_type == \"focusin\": # just enter the entry return tk.TRUE print(content)", "return tk.TRUE if content != \"x.x.x\" and trigger_type == \"focusin\": # just enter", "dots return tk.FALSE major = content[0:first_dot_index] minor = content[first_dot_index + 1:second_dot_index] patch =", "key_entry = ttk.Entry(key_frame, textvariable=KEY) set_key_button = ttk.Button(key_frame, text=\" Set key \", command=set_key) key_browse_button", "log.insert(tk.END, \"You must specify key.\\n\") return error_text = urlparse.validate(URL.get()) if error_text: log.insert(tk.END, error_text)", "char* url = ctypes.create_string_buffer(URL.get().encode()) key = ctypes.create_string_buffer(KEY.get().encode()) log.insert(tk.END, \"Starting key setting. 
Port: {}\\n\".format(URL.get()))", "10), foreground=\"grey\") version_entry.insert(tk.END, \"x.x.x\") return tk.TRUE # the .find(\".\", x) returns -1 if", "tk.TRUE if content == \"\": serial_entry.config(font=(\"Calibri Italic\", 10), foreground=\"grey\") if trigger_type == \"focusout\":", "= epcbootlib.urpc_write_ident(url, key, int(serial_entry.get()), version) if res == 0: log.insert(tk.END, \"Ok\\n\") _autoincrement_serial() else:", "initialdir=\"/\", title=\"Select firmware\", filetypes=((\"Firmware file\", \"*.cod\"), (\"All files\", \"*.*\"))) if not isinstance(main_win.firmware, type(None)):", "specify device URL.\\n\") return if KEY.get() == \"\": log.insert(tk.END, \"You must specify key.\\n\")", "383 elif sys.platform.startswith(\"linux\"): height += (568 - 412) main_win.geometry(f\"{width}x{height}\") def on_modification(event=None): log.see(tk.END) log.edit_modified(0)", "right_frame.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.BOTH) serial_frame = ttk.Frame(left_frame) serial_label = ttk.Label(serial_frame, text=\"Serial number:\") serial_entry =", "\" \"(developer or customer)\") args = parser.parse_args() # Event handlers def com_chosen(event=None): \"\"\"Sets", "right_frame, text=\"Set serial and hardware version\", width=30, command=ident_and_key_set) set_autoincrement_button.pack(expand=tk.TRUE, side=tk.TOP) set_ident_button.pack(expand=tk.TRUE, side=tk.BOTTOM) #", "text=\"Developer mode\", command=collapse) separator = ttk.Separator(collapse_frame, orient=\"horizontal\") collapse_button.pack(side=tk.LEFT, anchor=tk.NW, padx=5) separator.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.X,", "be a number!\\n\") return tk.FALSE if patch != \"\" and not patch.isdigit(): log.insert(tk.END,", "UPDATE_LOCK global UPDATE_RUNNING if UPDATE_RUNNING: messagebox.showinfo(\"Information\", \"You need to wait for the update", "text=\"COM port:\") URL = tk.StringVar() # URL of port combox = ttk.Combobox(com_frame, postcommand=_update_combox,", "font=( \"Calibri Italic\", 10), foreground=\"grey\") version_entry.insert(tk.END, \"x.x.x\") return tk.TRUE # the .find(\".\", x)", "\"\" # string containing firmware # firmware tab: firmware_tab = ttk.Frame(main_win) com_frame =", "developer tab # creating collapse button DEV_STATE = False # developer tab state", "firmware_browse_button = ttk.Button(firmware_frame, text=\"Browse...\", width=10, command=browse_firmware) upd_button = ttk.Button(firmware_tab, text=\"Update firmware\", state=tk.DISABLED, width=20,", "# the .find(\".\", x) returns -1 if \".\" is not found first_dot_index =", "{}\\n\".format(URL.get())) log.insert(tk.END, \"Please wait\\n\") main_win.update() res = epcbootlib.urpc_write_key(url, key) if res == 0:", "developer_tab.pack_forget() log_frame.pack(expand=tk.TRUE, side=tk.BOTTOM, fill=tk.BOTH) DEV_STATE = False if sys.platform.startswith(\"win\"): height -= (530 -", "\"\": log.insert(tk.END, \"You must specify device URL.\\n\") return if KEY.get() == \"\": log.insert(tk.END,", "parser = argparse.ArgumentParser() parser.add_argument(\"-m\", \"--method\", choices=(\"dev\", \"cust\"), help=\"Choosing method: dev or cust \"", "hint version_entry.config( font=( \"Calibri Italic\", 10), foreground=\"grey\") version_entry.insert(tk.END, \"x.x.x\") return tk.TRUE # the", "is version_entry: if _version_validation(content, trigger_type): return tk.TRUE return tk.FALSE def invalid_command(widget_name, content): \"\"\"Starts", "return # The statement below is necessary to work with url as C", "# developer tab: developer_tab = 
ttk.Frame(main_win) key_frame = ttk.Labelframe(developer_tab, text=\"Key\") ident_frame = ttk.Labelframe(developer_tab,", "fill=tk.BOTH) log_button.pack(side=tk.RIGHT) log.bind(\"<<Modified>>\", on_modification) # Add a thread to update firmware RUNNING =", "URL while RUNNING: UPDATE_LOCK.acquire() if not RUNNING: break UPDATE_RUNNING = True # Button", "content) instance.config(foreground=\"red\") def collapse(): \"\"\"Function collapses or expands developer tab\"\"\" global DEV_STATE height", "system!\") main_win.title(f\"EPCboot {Version.full}\") FIRMWARE = \"\" # string containing firmware # firmware tab:", "global KEY global URL if URL.get() == \"\": log.insert(tk.END, \"You must specify device", "from tkinter import filedialog, font, messagebox, scrolledtext, ttk import serial import serial.tools.list_ports import", "of enabled upd_button, method .state() returns empty tuple upd_button.focus() else: firmware_browse_button.focus() log.insert(tk.END, \"{}", "# in case of enabled upd_button, method .state() returns empty tuple upd_button.focus() else:", "firmware_label.pack(side=tk.LEFT) firmware_entry.pack(expand=tk.TRUE, side=tk.LEFT, fill=tk.X, padx=14) firmware_browse_button.pack(side=tk.LEFT, padx=5) upd_button.pack(side=tk.TOP, pady=0) # end of firmware", "of developer tab # creating collapse button DEV_STATE = False # developer tab", "and changes font If format is ok: green Calibri 10 If format uncorrect:", "import serial import serial.tools.list_ports import epcbootlib import urlparse from tip import ToolTip from", "parser.add_argument(\"-m\", \"--method\", choices=(\"dev\", \"cust\"), help=\"Choosing method: dev or cust \" \"(developer or customer)\")", "return tk.FALSE serial_entry.config(font=(\"Calibri\", 10), foreground=\"green\") return tk.TRUE def _version_validation(content, trigger_type=\"focusout\"): \"\"\"Returns tk.TRUE if", "returns empty tuple upd_button.focus() else: firmware_browse_button.focus() log.insert(tk.END, \"{} is chosen!\\n\".format(combox.get())) def _update_combox(): \"\"\"Updates", "\"Please wait\\n\") main_win.update() res = epcbootlib.urpc_firmware_update(url, FIRMWARE, len(FIRMWARE)) if res == 0: log.insert(tk.END,", "= content.find(\".\", 0) second_dot_index = content.find(\".\", first_dot_index + 1) third_dot_index = content.find(\".\", second_dot_index", "else: log.insert(tk.END, \"Fail\\n\") def ident_and_key_set(): \"\"\"Sets serial number, hardware version and key.\"\"\" global", "if FIRMWARE == \"\": log.insert(tk.END, \"You must specify firmware file.\\n\") return error_text =", "+ 1:] if second_dot_index == -1: # patch = \"\" # # minor", "if instance is version_entry: if _version_validation(content, trigger_type): return tk.TRUE return tk.FALSE def invalid_command(widget_name,", "= ctypes.create_string_buffer(URL.get().encode()) key = ctypes.create_string_buffer(KEY.get().encode()) version = ctypes.create_string_buffer(version_entry.get().encode()) log.insert( tk.END, \"Starting identificator and", "first_dot_index + 1) third_dot_index = content.find(\".\", second_dot_index + 1) # Consider -1 as", "close_window(): \"\"\"This function breaks an infinite loop in the update stream.\"\"\" global RUNNING", "+ 1:] if first_dot_index == -1: # patch = \"\" # minor =", "10 \"\"\" instance = main_win.nametowidget(widget_name) # getting certain entry if instance is serial_entry:", "version_entry.delete(0, tk.END) return tk.TRUE if content != \"x.x.x\" and trigger_type == \"focusin\": #", "you use Linux, open epcboot_gui with \" \"root.\\nIn case of 
using Windows, make", "= font.Font(com_hint, com_hint.cget(\"font\")) underlined_font.configure(underline=True) com_hint.configure(font=underlined_font) tip_com_hin = ToolTip(com_hint) if sys.platform.startswith(\"win\"): com_hint.config(text=\"Input format\", foreground=\"grey\")", "not\" \" used by another program.\\n\") if sys.platform.startswith(\"win\"): URL.set(r\"com:\\\\.\\{}\".format(URL.get())) elif sys.platform.startswith(\"linux\"): URL.set(r\"com://{}\".format(URL.get())) if", "UPDATE_RUNNING global URL while RUNNING: UPDATE_LOCK.acquire() if not RUNNING: break UPDATE_RUNNING = True", "firmware_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=5) com_label.pack(side=tk.LEFT) combox.pack(side=tk.LEFT, padx=10) com_hint.pack(side=tk.LEFT) firmware_label.pack(side=tk.LEFT) firmware_entry.pack(expand=tk.TRUE, side=tk.LEFT, fill=tk.X,", "def start_update(): \"\"\"Function starts the firmware update.\"\"\" global UPDATE_LOCK if URL.get() == \"\":", "FIRM_PATH.get()))) log.insert(tk.END, \"Please wait\\n\") main_win.update() res = epcbootlib.urpc_firmware_update(url, FIRMWARE, len(FIRMWARE)) if res ==", "not RUNNING: break UPDATE_RUNNING = True # Button is clicked set_buttons_to_state(tk.DISABLED) # The", "main window main_win = tk.Tk() # Setting window geometry and title if sys.platform.startswith(\"win\"):", "key_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=4) ident_frame.pack(expand=tk.FALSE, side=tk.TOP, padx=5, fill=tk.BOTH, pady=3, ipady=4) serial_frame.pack(side=tk.TOP, fill=tk.X,", "content.isdigit(): log.insert(tk.END, \"Serial number must be a number!\\n\") return tk.FALSE serial_entry.config(font=(\"Calibri\", 10), foreground=\"green\")", "not minor.isdigit(): if (minor == \"\" and second_dot_index == -1 and trigger_type !=", "ttk.Entry(version_frame, foreground=\"grey\", font=(\"Calibri Italic\", 10)) version_entry.insert(tk.END, \"x.x.x\") AUTOINCR = tk.BooleanVar() set_autoincrement_button = ttk.Checkbutton(", "the hint serial_entry.delete(0, tk.END) return tk.TRUE if content == \"xxx\" and trigger_type ==", "serial_frame = ttk.Frame(left_frame) serial_label = ttk.Label(serial_frame, text=\"Serial number:\") serial_entry = ttk.Entry(serial_frame, foreground=\"grey\", font=(\"Calibri", "\"\" # # major correction major = content[second_dot_index + 1:] if not major.isdigit():", "# Button is clicked set_buttons_to_state(tk.DISABLED) # The statement below is necessary to work", "method .state() returns empty tuple upd_button.focus() else: firmware_browse_button.focus() log.insert(tk.END, \"{} is chosen!\\n\".format(combox.get())) def", "== -1: # patch = \"\" # # minor correction minor = content[first_dot_index", "\"x.x.x\" and trigger_type == \"focusin\": # clears the hint version_entry.delete(0, tk.END) return tk.TRUE", "second_dot_index == -1: # patch = \"\" # # minor correction minor =", "to controller * browse key file (*.txt) and load it to controller (developer", "= tk.Tk() # Setting window geometry and title if sys.platform.startswith(\"win\"): main_win.geometry(\"500x383\") elif sys.platform.startswith(\"linux\"):", "to wait for the update to complete\") return UPDATE_LOCK.release() RUNNING = False main_win.destroy()", "as C char* url = ctypes.create_string_buffer(URL.get().encode()) log.insert( tk.END, \"Starting firmware update. Port: {}.", "set_buttons_to_state(tk.DISABLED) # The statement below is necessary to work with url as C", "False def key_browse(): \"\"\"Opens file dialog. 
Key must be .txt file.\"\"\" global KEY", "returns -1 if \".\" is not found first_dot_index = content.find(\".\", 0) second_dot_index =", "validation commands return False.\"\"\" instance = main_win.nametowidget(widget_name) # getting certain entry instance.delete(0, tk.END)", "= False def key_browse(): \"\"\"Opens file dialog. Key must be .txt file.\"\"\" global", "major correction major = content[second_dot_index + 1:] if not major.isdigit(): log.insert(tk.END, \"MAJOR should", "trigger_type): if content == \"xxx\" and trigger_type == \"focusin\": # clears the hint", "invalidcommand=(ivcmd, \"%W\", \"%P\"), validate=\"all\") version_entry.config(validatecommand=(vcmd, \"%W\", \"%P\", \"%V\"), invalidcommand=(ivcmd, \"%W\", \"%P\"), validate=\"all\") #", "log.insert(tk.END, \"PATCH should be a number!\\n\") return tk.FALSE version_entry.config(font=(\"Calibri\", 10), foreground=\"green\") return tk.TRUE", "tk.END) return tk.TRUE if content == \"xxx\" and trigger_type == \"focusout\": # just", "cannot be three or more dots return tk.FALSE major = content[0:first_dot_index] minor =", "command=ident_and_key_set) set_autoincrement_button.pack(expand=tk.TRUE, side=tk.TOP) set_ident_button.pack(expand=tk.TRUE, side=tk.BOTTOM) # Setting validation to entries (serial_entry and version_entry)", "tk.END, \"Something is wrong! If you use Linux, open epcboot_gui with \" \"root.\\nIn", "foreground=\"grey\") tip_com_hin.set_text(r\"com:\\\\.\\COMx\") elif sys.platform.startswith(\"linux\"): com_hint.config(text=\"Input format\", foreground=\"grey\") tip_com_hin.set_text(\"com:///dev/ttyUSBx\\ncom:///dev/ttyACMx\\n\" \"com:///dev/ttySx\") firmware_frame = ttk.Labelframe(firmware_tab, text=\"Firmware", "def clean_log(): \"\"\"Cleans log.\"\"\" log.delete('1.0', tk.END) def browse_firmware(): \"\"\"Function opens file dialog. 
We", "set_key(): \"\"\"Function sets cryptographic key.\"\"\" global KEY global URL if URL.get() == \"\":", "10)) version_entry.insert(tk.END, \"x.x.x\") AUTOINCR = tk.BooleanVar() set_autoincrement_button = ttk.Checkbutton( right_frame, text=\"Auto increment\", width=30,", "and load it to controller (developer only) * update serial and version (developer", "there cannot be three or more dots return tk.FALSE major = content[0:first_dot_index] minor", "1) # Consider -1 as dot absence if first_dot_index == -1: # second_dot_index,", "\"*.cod\"), (\"All files\", \"*.*\"))) if not isinstance(main_win.firmware, type(None)): # File was opened FIRM_PATH.set(main_win.firmware.name)", "minor = content[first_dot_index + 1:] if first_dot_index == -1: # patch = \"\"", "right_frame = ttk.Frame(ident_frame) left_frame.pack(expand=tk.TRUE, side=tk.LEFT, fill=tk.BOTH) right_frame.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.BOTH) serial_frame = ttk.Frame(left_frame) serial_label", "import tkinter as tk from tkinter import filedialog, font, messagebox, scrolledtext, ttk import", "foreground=\"grey\", font=(\"Calibri Italic\", 10)) serial_entry.insert(tk.END, \"xxx\") version_frame = ttk.Frame(left_frame) version_label = ttk.Label(version_frame, text=\"HW", "tk.Tk() # Setting window geometry and title if sys.platform.startswith(\"win\"): main_win.geometry(\"500x383\") elif sys.platform.startswith(\"linux\"): main_win.geometry(\"640x412\")", "messagebox, scrolledtext, ttk import serial import serial.tools.list_ports import epcbootlib import urlparse from tip", "only) \"\"\" import argparse import ctypes import ntpath import sys import threading import", "-1 # dot indices if second_dot_index == -1: # correction third_dot_index = -1", "text=\"Serial number:\") serial_entry = ttk.Entry(serial_frame, foreground=\"grey\", font=(\"Calibri Italic\", 10)) serial_entry.insert(tk.END, \"xxx\") version_frame =", "\"%W\", \"%P\"), validate=\"all\") # end of developer tab # creating collapse button DEV_STATE", "= ttk.Entry(serial_frame, foreground=\"grey\", font=(\"Calibri Italic\", 10)) serial_entry.insert(tk.END, \"xxx\") version_frame = ttk.Frame(left_frame) version_label =", "anchor=tk.NW, padx=5) separator.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.X, padx=5) # end of collapse button firmware_tab.pack(expand=tk.FALSE, side=tk.TOP,", "if trigger_type == \"focusout\": # sets the hint version_entry.config( font=( \"Calibri Italic\", 10),", "\"xxx\") version_frame = ttk.Frame(left_frame) version_label = ttk.Label(version_frame, text=\"HW version:\") version_entry = ttk.Entry(version_frame, foreground=\"grey\",", "tab # developer tab: developer_tab = ttk.Frame(main_win) key_frame = ttk.Labelframe(developer_tab, text=\"Key\") ident_frame =", "ctypes.create_string_buffer(KEY.get().encode()) version = ctypes.create_string_buffer(version_entry.get().encode()) log.insert( tk.END, \"Starting identificator and key setting. Port: {}\\n", "using Windows, make sure that the device is not\" \" used by another", "= serial.Serial(port=URL.get()) test_port.close() except serial.SerialException: log.insert( tk.END, \"Something is wrong! 
If you use", "com_frame = ttk.Labelframe(firmware_tab, text=\"COM settings\") com_label = ttk.Label(com_frame, text=\"COM port:\") URL = tk.StringVar()", "else: separator.pack_forget() log_frame.pack_forget() developer_tab.pack(expand=tk.FALSE, side=tk.TOP, fill=tk.BOTH) log_frame.pack(expand=tk.TRUE, side=tk.BOTTOM, fill=tk.BOTH) DEV_STATE = True if", "text=\"Log\") log = scrolledtext.ScrolledText(log_frame, height=8, wrap=tk.WORD) log.edit_modified(0) log_button_frame = ttk.LabelFrame(log_frame) log_button = ttk.Button(log_button_frame,", "log.insert(tk.END, \"You must specify version.\\n\") return error_text = urlparse.validate(URL.get()) if error_text: log.insert(tk.END, error_text)", "False main_win.destroy() # Creating main window main_win = tk.Tk() # Setting window geometry", "return def _serial_validation(content, trigger_type): if content == \"xxx\" and trigger_type == \"focusin\": #", "width=10, command=browse_firmware) upd_button = ttk.Button(firmware_tab, text=\"Update firmware\", state=tk.DISABLED, width=20, command=start_update) com_frame.pack(side=tk.TOP, fill=tk.X, padx=5,", "on PC and load it to controller * browse key file (*.txt) and", "only) * update serial and version (developer only) \"\"\" import argparse import ctypes", "1:] if second_dot_index == -1: # patch = \"\" # # minor correction", "ttk.Checkbutton( right_frame, text=\"Auto increment\", width=30, variable=AUTOINCR) set_ident_button = ttk.Button( right_frame, text=\"Set serial and", "= ttk.Frame(main_win) collapse_button = ttk.Button(collapse_frame, text=\"Developer mode\", command=collapse) separator = ttk.Separator(collapse_frame, orient=\"horizontal\") collapse_button.pack(side=tk.LEFT,", ".py file offers GUI for EPCboot. It allows: * browse firmware on PC", "second_dot_index == -1: # correction third_dot_index = -1 # if third_dot_index != -1:", "= int(serial_entry.get()) serial_entry.delete(0, tk.END) serial_entry.insert(0, str(serial_number + 1)) log.insert(tk.END, \"Serial number incremented.\") else:", "global URL if URL.get() == \"\": log.insert(tk.END, \"You must specify device URL.\\n\") return", "def collapse(): \"\"\"Function collapses or expands developer tab\"\"\" global DEV_STATE height = main_win.winfo_height()", "== \"focusout\": serial_entry.delete(0, tk.END) serial_entry.insert(tk.END, \"xxx\") return tk.TRUE if not content.isdigit(): log.insert(tk.END, \"Serial", "if version format is correct\"\"\" if content == \"x.x.x\" and trigger_type == \"focusin\":", "sys.platform.startswith(\"win\"): main_win.geometry(\"500x383\") elif sys.platform.startswith(\"linux\"): main_win.geometry(\"640x412\") else: print(\"Unknown system!\") main_win.title(f\"EPCboot {Version.full}\") FIRMWARE = \"\"", "= ttk.Label(version_frame, text=\"HW version:\") version_entry = ttk.Entry(version_frame, foreground=\"grey\", font=(\"Calibri Italic\", 10)) version_entry.insert(tk.END, \"x.x.x\")", "enabled upd_button, method .state() returns empty tuple upd_button.focus() else: firmware_browse_button.focus() log.insert(tk.END, \"{} is", "side=tk.TOP) set_ident_button.pack(expand=tk.TRUE, side=tk.BOTTOM) # Setting validation to entries (serial_entry and version_entry) vcmd =", "\"Ok\\n\") _autoincrement_serial() else: log.insert(tk.END, \"Fail\\n\") def _autoincrement_serial(): global AUTOINCR if AUTOINCR.get(): serial_number =", "command=browse_firmware) upd_button = ttk.Button(firmware_tab, text=\"Update firmware\", state=tk.DISABLED, width=20, command=start_update) com_frame.pack(side=tk.TOP, fill=tk.X, padx=5, 
pady=3,", "separator.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.X, padx=5) # end of collapse button firmware_tab.pack(expand=tk.FALSE, side=tk.TOP, fill=tk.X, anchor=tk.N)", "a number!\\n\") return tk.FALSE version_entry.config(font=(\"Calibri\", 10), foreground=\"green\") return tk.TRUE def validation_command(widget_name, content, trigger_type):", "FIRMWARE = main_win.firmware.read() upd_button.config(state=tk.NORMAL) if URL.get() == \"\": combox.focus() else: upd_button.focus() def start_update():", "side=tk.RIGHT, fill=tk.X, padx=5) # end of collapse button firmware_tab.pack(expand=tk.FALSE, side=tk.TOP, fill=tk.X, anchor=tk.N) collapse_frame.pack(expand=tk.FALSE,", "tkinter as tk from tkinter import filedialog, font, messagebox, scrolledtext, ttk import serial", "ctypes.create_string_buffer(URL.get().encode()) key = ctypes.create_string_buffer(KEY.get().encode()) version = ctypes.create_string_buffer(version_entry.get().encode()) log.insert( tk.END, \"Starting identificator and key", "global UPDATE_LOCK global UPDATE_RUNNING if UPDATE_RUNNING: messagebox.showinfo(\"Information\", \"You need to wait for the", "collapse button DEV_STATE = False # developer tab state collapse_frame = ttk.Frame(main_win) collapse_button", "= parser.parse_args() # Event handlers def com_chosen(event=None): \"\"\"Sets URL.\"\"\" global URL try: test_port", "dev or cust \" \"(developer or customer)\") args = parser.parse_args() # Event handlers", "the .find(\".\", x) returns -1 if \".\" is not found first_dot_index = content.find(\".\",", "main_win.geometry(f\"{width}x{height}\") def on_modification(event=None): log.see(tk.END) log.edit_modified(0) def close_window(): \"\"\"This function breaks an infinite loop", "GUI for EPCboot. It allows: * browse firmware on PC and load it", "width=30, variable=AUTOINCR) set_ident_button = ttk.Button( right_frame, text=\"Set serial and hardware version\", width=30, command=ident_and_key_set)", "to work with url as C char* url = ctypes.create_string_buffer(URL.get().encode()) key = ctypes.create_string_buffer(KEY.get().encode())", "correction major = content[second_dot_index + 1:] if not major.isdigit(): log.insert(tk.END, \"MAJOR should be", "url as C char* url = ctypes.create_string_buffer(URL.get().encode()) key = ctypes.create_string_buffer(KEY.get().encode()) log.insert(tk.END, \"Starting key", "\"Calibri Italic\", 10), foreground=\"grey\") version_entry.insert(tk.END, \"x.x.x\") return tk.TRUE # the .find(\".\", x) returns", "else: firmware_browse_button.focus() log.insert(tk.END, \"{} is chosen!\\n\".format(combox.get())) def _update_combox(): \"\"\"Updates COM list.\"\"\" combox.config(values=[comport.device for", "type(None)): # File was opened FIRM_PATH.set(main_win.firmware.name) FIRMWARE = main_win.firmware.read() upd_button.config(state=tk.NORMAL) if URL.get() ==", "specify serial number.\\n\") return if version_entry.get() == \"x.x.x\": log.insert(tk.END, \"You must specify version.\\n\")", "leave the entry return tk.TRUE if content == \"\": serial_entry.config(font=(\"Calibri Italic\", 10), foreground=\"grey\")", "if first_dot_index == -1: # patch = \"\" # minor = \"\" #", "filetypes=((\"Text files\", \"*.txt\"), (\"All files\", \"*.*\"))) if not isinstance(main_win.key_file, type(None)): # File was", "\"\": log.insert(tk.END, \"You must specify key.\\n\") return error_text = urlparse.validate(URL.get()) if error_text: log.insert(tk.END,", "number: \" \"{}\\n Hardware version: {}\\n\".format(URL.get(), serial_entry.get(), version_entry.get())) 
log.insert(tk.END, \"Please wait\\n\") main_win.update() res", "urlparse.validate(URL.get()) if error_text: log.insert(tk.END, error_text) return UPDATE_LOCK.release() def set_buttons_to_state(state): \"\"\"Function sets the given", "ttk.Button(key_frame, text=\"Browse...\", command=key_browse) left_frame = ttk.Frame(ident_frame) right_frame = ttk.Frame(ident_frame) left_frame.pack(expand=tk.TRUE, side=tk.LEFT, fill=tk.BOTH) right_frame.pack(expand=tk.TRUE,", "format( URL.get(), ntpath.basename( FIRM_PATH.get()))) log.insert(tk.END, \"Please wait\\n\") main_win.update() res = epcbootlib.urpc_firmware_update(url, FIRMWARE, len(FIRMWARE))", "FIRMWARE == \"\": log.insert(tk.END, \"You must specify firmware file.\\n\") return error_text = urlparse.validate(URL.get())", "True if sys.platform.startswith(\"win\"): height += 530 - 383 elif sys.platform.startswith(\"linux\"): height += (568", "ipady=4) ident_frame.pack(expand=tk.FALSE, side=tk.TOP, padx=5, fill=tk.BOTH, pady=3, ipady=4) serial_frame.pack(side=tk.TOP, fill=tk.X, pady=7) version_frame.pack(side=tk.TOP, fill=tk.X) key_label.pack(side=tk.LEFT)", "the given state to all buttons. :param: given state.\"\"\" firmware_browse_button.config(state=state) upd_button.config(state=state) collapse_button.config(state=state) key_browse_button.config(state=state)", "cryptographic key.\"\"\" global KEY global URL if URL.get() == \"\": log.insert(tk.END, \"You must", "= ttk.Frame(ident_frame) left_frame.pack(expand=tk.TRUE, side=tk.LEFT, fill=tk.BOTH) right_frame.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.BOTH) serial_frame = ttk.Frame(left_frame) serial_label =", "height -= (530 - 383) elif sys.platform.startswith(\"linux\"): height -= (568 - 412) else:", "{}. Firmware file: {}\\n\". format( URL.get(), ntpath.basename( FIRM_PATH.get()))) log.insert(tk.END, \"Please wait\\n\") main_win.update() res", "KEY if URL.get() == \"\": log.insert(tk.END, \"You must specify device URL.\\n\") return if", "if content == \"xxx\" and trigger_type == \"focusin\": # clears the hint serial_entry.delete(0,", "threading.Lock() UPDATE_LOCK.acquire() UPDATE_RUNNING = False thread_upd = threading.Thread(target=update_firmware) thread_upd.start() main_win.protocol(\"WM_DELETE_WINDOW\", close_window) tk.mainloop() thread_upd.join()", "content[first_dot_index + 1:second_dot_index] patch = content[second_dot_index + 1:] if second_dot_index == -1: #", "if version_entry.get() == \"x.x.x\": log.insert(tk.END, \"You must specify version.\\n\") return error_text = urlparse.validate(URL.get())", "padx=5) separator.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.X, padx=5) # end of collapse button firmware_tab.pack(expand=tk.FALSE, side=tk.TOP, fill=tk.X,", "Port: {}\\n\".format(URL.get())) log.insert(tk.END, \"Please wait\\n\") main_win.update() res = epcbootlib.urpc_write_key(url, key) if res ==", "This .py file offers GUI for EPCboot. 
It allows: * browse firmware on", "trigger_type=\"focusout\"): \"\"\"Returns tk.TRUE if version format is correct\"\"\" if content == \"x.x.x\" and", "containing firmware # firmware tab: firmware_tab = ttk.Frame(main_win) com_frame = ttk.Labelframe(firmware_tab, text=\"COM settings\")", "text=\"Firmware:\") FIRM_PATH = tk.StringVar() # path to firmware firmware_entry = ttk.Entry(firmware_frame, textvariable=FIRM_PATH, width=17)", "version) if res == 0: log.insert(tk.END, \"Ok\\n\") _autoincrement_serial() else: log.insert(tk.END, \"Fail\\n\") def _autoincrement_serial():", "return if serial_entry.get() == \"xxx\": log.insert(tk.END, \"You must specify serial number.\\n\") return if", "serial_entry.get(), version_entry.get())) log.insert(tk.END, \"Please wait\\n\") main_win.update() res = epcbootlib.urpc_write_ident(url, key, int(serial_entry.get()), version) if", "text=\"Firmware update\") firmware_label = ttk.Label(firmware_frame, text=\"Firmware:\") FIRM_PATH = tk.StringVar() # path to firmware", "Windows, make sure that the device is not\" \" used by another program.\\n\")", "while RUNNING: UPDATE_LOCK.acquire() if not RUNNING: break UPDATE_RUNNING = True # Button is", "in serial.tools.list_ports.comports()]) def clean_log(): \"\"\"Cleans log.\"\"\" log.delete('1.0', tk.END) def browse_firmware(): \"\"\"Function opens file", "res = epcbootlib.urpc_firmware_update(url, FIRMWARE, len(FIRMWARE)) if res == 0: log.insert(tk.END, \"Ok\\n\") else: log.insert(tk.END,", "all buttons. :param: given state.\"\"\" firmware_browse_button.config(state=state) upd_button.config(state=state) collapse_button.config(state=state) key_browse_button.config(state=state) set_key_button.config(state=state) set_ident_button.config(state=state) log_button.config(state=state) def", "to work with url as C char* url = ctypes.create_string_buffer(URL.get().encode()) log.insert( tk.END, \"Starting", "key.\"\"\" global URL global KEY if URL.get() == \"\": log.insert(tk.END, \"You must specify", "= filedialog.askopenfile( mode=\"rb\", initialdir=\"/\", title=\"Select firmware\", filetypes=((\"Firmware file\", \"*.cod\"), (\"All files\", \"*.*\"))) if", "def update_firmware(): \"\"\"Updates firmware.\"\"\" global FIRMWARE global FIRM_PATH global RUNNING global UPDATE_LOCK global", "+ 1:] if not major.isdigit(): log.insert(tk.END, \"MAJOR should be a number!\\n\") return tk.FALSE", "= -1, -1 # dot indices if second_dot_index == -1: # correction third_dot_index", "version_frame.pack(side=tk.TOP, fill=tk.X) key_label.pack(side=tk.LEFT) key_entry.pack(expand=tk.TRUE, side=tk.LEFT, fill=tk.X, padx=15) set_key_button.pack(side=tk.RIGHT, padx=4) key_browse_button.pack(side=tk.RIGHT) serial_label.pack(expand=tk.FALSE, side=tk.LEFT) serial_entry.pack(side=tk.LEFT,", "10 If format uncorrect: red Calibri 10 If empty: sets hint, grey Calibri", "log.insert(tk.END, \"Ok\\n\") else: log.insert(tk.END, \"Fail\\n\") def ident_and_key_set(): \"\"\"Sets serial number, hardware version and", "+= (568 - 412) main_win.geometry(f\"{width}x{height}\") def on_modification(event=None): log.see(tk.END) log.edit_modified(0) def close_window(): \"\"\"This function", "tip import ToolTip from version import Version parser = argparse.ArgumentParser() parser.add_argument(\"-m\", \"--method\", choices=(\"dev\",", "the entry return tk.TRUE print(content) if content == \"\": if trigger_type == \"focusout\":", "clicked set_buttons_to_state(tk.DISABLED) # The statement below is necessary to work with url as", "\"%P\", \"%V\"), invalidcommand=(ivcmd, \"%W\", 
\"%P\"), validate=\"all\") version_entry.config(validatecommand=(vcmd, \"%W\", \"%P\", \"%V\"), invalidcommand=(ivcmd, \"%W\", \"%P\"),", "by another program.\\n\") if sys.platform.startswith(\"win\"): URL.set(r\"com:\\\\.\\{}\".format(URL.get())) elif sys.platform.startswith(\"linux\"): URL.set(r\"com://{}\".format(URL.get())) if upd_button.state() == ():", "!= -1: # there cannot be three or more dots return tk.FALSE major", "isinstance(main_win.firmware, type(None)): # File was opened FIRM_PATH.set(main_win.firmware.name) FIRMWARE = main_win.firmware.read() upd_button.config(state=tk.NORMAL) if URL.get()", "text=\"Browse...\", command=key_browse) left_frame = ttk.Frame(ident_frame) right_frame = ttk.Frame(ident_frame) left_frame.pack(expand=tk.TRUE, side=tk.LEFT, fill=tk.BOTH) right_frame.pack(expand=tk.TRUE, side=tk.RIGHT,", "url = ctypes.create_string_buffer(URL.get().encode()) key = ctypes.create_string_buffer(KEY.get().encode()) log.insert(tk.END, \"Starting key setting. Port: {}\\n\".format(URL.get())) log.insert(tk.END,", "URL.\\n\") return if KEY.get() == \"\": log.insert(tk.END, \"You must specify key.\\n\") return if", "identificator and key setting. Port: {}\\n Serial number: \" \"{}\\n Hardware version: {}\\n\".format(URL.get(),", "tk.TRUE # the .find(\".\", x) returns -1 if \".\" is not found first_dot_index", "return tk.TRUE # the .find(\".\", x) returns -1 if \".\" is not found", "# major correction major = content[second_dot_index + 1:] if not major.isdigit(): log.insert(tk.END, \"MAJOR", "if URL.get() == \"\": log.insert(tk.END, \"You must specify device URL.\\n\") return if KEY.get()", "to controller (developer only) * update serial and version (developer only) \"\"\" import", "{}\\n\". format( URL.get(), ntpath.basename( FIRM_PATH.get()))) log.insert(tk.END, \"Please wait\\n\") main_win.update() res = epcbootlib.urpc_firmware_update(url, FIRMWARE,", "-1 if \".\" is not found first_dot_index = content.find(\".\", 0) second_dot_index = content.find(\".\",", "format\", foreground=\"grey\") tip_com_hin.set_text(r\"com:\\\\.\\COMx\") elif sys.platform.startswith(\"linux\"): com_hint.config(text=\"Input format\", foreground=\"grey\") tip_com_hin.set_text(\"com:///dev/ttyUSBx\\ncom:///dev/ttyACMx\\n\" \"com:///dev/ttySx\") firmware_frame = ttk.Labelframe(firmware_tab,", "version_entry) vcmd = main_win.register(validation_command) ivcmd = main_win.register(invalid_command) serial_entry.config(validatecommand=(vcmd, \"%W\", \"%P\", \"%V\"), invalidcommand=(ivcmd, \"%W\",", "should be a number!\\n\") return tk.FALSE version_entry.config(font=(\"Calibri\", 10), foreground=\"green\") return tk.TRUE def validation_command(widget_name,", "opened KEY.set(main_win.key_file.read().rstrip()) main_win.key_file.close() def set_key(): \"\"\"Function sets cryptographic key.\"\"\" global KEY global URL", "= content.find(\".\", first_dot_index + 1) third_dot_index = content.find(\".\", second_dot_index + 1) # Consider", "# Add a thread to update firmware RUNNING = True UPDATE_LOCK = threading.Lock()", "else: log.insert(tk.END, \"Fail\\n\") set_buttons_to_state(tk.NORMAL) UPDATE_RUNNING = False def key_browse(): \"\"\"Opens file dialog. 
Key", "if not content.isdigit(): log.insert(tk.END, \"Serial number must be a number!\\n\") return tk.FALSE serial_entry.config(font=(\"Calibri\",", "be three or more dots return tk.FALSE major = content[0:first_dot_index] minor = content[first_dot_index", "separator.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.X, padx=5) developer_tab.pack_forget() log_frame.pack(expand=tk.TRUE, side=tk.BOTTOM, fill=tk.BOTH) DEV_STATE = False if sys.platform.startswith(\"win\"):", "necessary to work with url as C char* url = ctypes.create_string_buffer(URL.get().encode()) log.insert( tk.END,", "work with url as C char* url = ctypes.create_string_buffer(URL.get().encode()) key = ctypes.create_string_buffer(KEY.get().encode()) log.insert(tk.END,", "complete\") return UPDATE_LOCK.release() RUNNING = False main_win.destroy() # Creating main window main_win =", "or customer)\") args = parser.parse_args() # Event handlers def com_chosen(event=None): \"\"\"Sets URL.\"\"\" global", "sys.platform.startswith(\"linux\"): URL.set(r\"com://{}\".format(URL.get())) if upd_button.state() == (): # in case of enabled upd_button, method", "third_dot_index = content.find(\".\", second_dot_index + 1) # Consider -1 as dot absence if", "version_entry.get())) log.insert(tk.END, \"Please wait\\n\") main_win.update() res = epcbootlib.urpc_write_ident(url, key, int(serial_entry.get()), version) if res", "+ 1) third_dot_index = content.find(\".\", second_dot_index + 1) # Consider -1 as dot", "trigger_type == \"focusout\": # sets the hint version_entry.config( font=( \"Calibri Italic\", 10), foreground=\"grey\")", "breaks an infinite loop in the update stream.\"\"\" global RUNNING global UPDATE_LOCK global", "global KEY if URL.get() == \"\": log.insert(tk.END, \"You must specify device URL.\\n\") return", "is serial_entry: if _serial_validation(content, trigger_type): return tk.TRUE if instance is version_entry: if _version_validation(content,", "patch.isdigit(): log.insert(tk.END, \"PATCH should be a number!\\n\") return tk.FALSE version_entry.config(font=(\"Calibri\", 10), foreground=\"green\") return", "text=\"Auto increment\", width=30, variable=AUTOINCR) set_ident_button = ttk.Button( right_frame, text=\"Set serial and hardware version\",", "update\") firmware_label = ttk.Label(firmware_frame, text=\"Firmware:\") FIRM_PATH = tk.StringVar() # path to firmware firmware_entry", "serial_entry: if _serial_validation(content, trigger_type): return tk.TRUE if instance is version_entry: if _version_validation(content, trigger_type):", "return tk.FALSE major = content[0:first_dot_index] minor = content[first_dot_index + 1:second_dot_index] patch = content[second_dot_index", "key_browse_button.pack(side=tk.RIGHT) serial_label.pack(expand=tk.FALSE, side=tk.LEFT) serial_entry.pack(side=tk.LEFT, padx=14) version_label.pack(side=tk.LEFT) version_entry.pack(side=tk.LEFT, padx=26) set_ident_button.pack(side=tk.TOP) # log_frame log_frame =", "_update_combox(): \"\"\"Updates COM list.\"\"\" combox.config(values=[comport.device for comport in serial.tools.list_ports.comports()]) def clean_log(): \"\"\"Cleans log.\"\"\"", "test_port = serial.Serial(port=URL.get()) test_port.close() except serial.SerialException: log.insert( tk.END, \"Something is wrong! If you", "title=\"Select key\", filetypes=((\"Text files\", \"*.txt\"), (\"All files\", \"*.*\"))) if not isinstance(main_win.key_file, type(None)): #", "tk.END, \"Starting firmware update. Port: {}. Firmware file: {}\\n\". 
format( URL.get(), ntpath.basename( FIRM_PATH.get())))", "\"\"\" This .py file offers GUI for EPCboot. It allows: * browse firmware", "== \"\": combox.focus() else: upd_button.focus() def start_update(): \"\"\"Function starts the firmware update.\"\"\" global", "\"\"\"Opens file dialog. Key must be .txt file.\"\"\" global KEY main_win.key_file = filedialog.askopenfile(", "anchor=tk.N) key_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=4) ident_frame.pack(expand=tk.FALSE, side=tk.TOP, padx=5, fill=tk.BOTH, pady=3, ipady=4) serial_frame.pack(side=tk.TOP,", "ipady=6) firmware_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=5) com_label.pack(side=tk.LEFT) combox.pack(side=tk.LEFT, padx=10) com_hint.pack(side=tk.LEFT) firmware_label.pack(side=tk.LEFT) firmware_entry.pack(expand=tk.TRUE, side=tk.LEFT,", "\"\"\"Updates COM list.\"\"\" combox.config(values=[comport.device for comport in serial.tools.list_ports.comports()]) def clean_log(): \"\"\"Cleans log.\"\"\" log.delete('1.0',", "= content.find(\".\", second_dot_index + 1) # Consider -1 as dot absence if first_dot_index", "side=tk.TOP, fill=tk.X, anchor=tk.N) collapse_frame.pack(expand=tk.FALSE, side=tk.TOP, fill=tk.X, anchor=tk.N) key_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=4) ident_frame.pack(expand=tk.FALSE,", "upd_button.config(state=state) collapse_button.config(state=state) key_browse_button.config(state=state) set_key_button.config(state=state) set_ident_button.config(state=state) log_button.config(state=state) def update_firmware(): \"\"\"Updates firmware.\"\"\" global FIRMWARE global", "10)) underlined_font = font.Font(com_hint, com_hint.cget(\"font\")) underlined_font.configure(underline=True) com_hint.configure(font=underlined_font) tip_com_hin = ToolTip(com_hint) if sys.platform.startswith(\"win\"): com_hint.config(text=\"Input", "ToolTip from version import Version parser = argparse.ArgumentParser() parser.add_argument(\"-m\", \"--method\", choices=(\"dev\", \"cust\"), help=\"Choosing", "UPDATE_LOCK global UPDATE_RUNNING global URL while RUNNING: UPDATE_LOCK.acquire() if not RUNNING: break UPDATE_RUNNING", "else: print(\"Unknown system!\") main_win.title(f\"EPCboot {Version.full}\") FIRMWARE = \"\" # string containing firmware #", "developer_tab = ttk.Frame(main_win) key_frame = ttk.Labelframe(developer_tab, text=\"Key\") ident_frame = ttk.Labelframe(developer_tab, text=\"Identification\") key_label =", "Setting window geometry and title if sys.platform.startswith(\"win\"): main_win.geometry(\"500x383\") elif sys.platform.startswith(\"linux\"): main_win.geometry(\"640x412\") else: print(\"Unknown", "tk.END, \"Starting identificator and key setting. Port: {}\\n Serial number: \" \"{}\\n Hardware", "import threading import tkinter as tk from tkinter import filedialog, font, messagebox, scrolledtext,", "return tk.FALSE if not minor.isdigit(): if (minor == \"\" and second_dot_index == -1", "tip_com_hin.set_text(r\"com:\\\\.\\COMx\") elif sys.platform.startswith(\"linux\"): com_hint.config(text=\"Input format\", foreground=\"grey\") tip_com_hin.set_text(\"com:///dev/ttyUSBx\\ncom:///dev/ttyACMx\\n\" \"com:///dev/ttySx\") firmware_frame = ttk.Labelframe(firmware_tab, text=\"Firmware update\")", "Setting validation to entries (serial_entry and version_entry) vcmd = main_win.register(validation_command) ivcmd = main_win.register(invalid_command)", "hardware version and key.\"\"\" global URL global KEY if URL.get() == \"\": log.insert(tk.END,", "<gh_stars>0 \"\"\" This .py file offers GUI for EPCboot. 
It allows: * browse", "format is ok: green Calibri 10 If format uncorrect: red Calibri 10 If", "\"\"\"Checks entry format and changes font If format is ok: green Calibri 10", "Set key \", command=set_key) key_browse_button = ttk.Button(key_frame, text=\"Browse...\", command=key_browse) left_frame = ttk.Frame(ident_frame) right_frame", "firmware_tab.pack(expand=tk.FALSE, side=tk.TOP, fill=tk.X, anchor=tk.N) collapse_frame.pack(expand=tk.FALSE, side=tk.TOP, fill=tk.X, anchor=tk.N) key_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=4)", "needed.\"\"\" global FIRM_PATH global FIRMWARE global URL main_win.firmware = filedialog.askopenfile( mode=\"rb\", initialdir=\"/\", title=\"Select", "= ttk.Button(collapse_frame, text=\"Developer mode\", command=collapse) separator = ttk.Separator(collapse_frame, orient=\"horizontal\") collapse_button.pack(side=tk.LEFT, anchor=tk.NW, padx=5) separator.pack(expand=tk.TRUE,", "font If format is ok: green Calibri 10 If format uncorrect: red Calibri", "False.\"\"\" instance = main_win.nametowidget(widget_name) # getting certain entry instance.delete(0, tk.END) instance.insert(tk.END, content) instance.config(foreground=\"red\")", "- 383) elif sys.platform.startswith(\"linux\"): height -= (568 - 412) else: separator.pack_forget() log_frame.pack_forget() developer_tab.pack(expand=tk.FALSE,", "int(serial_entry.get()), version) if res == 0: log.insert(tk.END, \"Ok\\n\") _autoincrement_serial() else: log.insert(tk.END, \"Fail\\n\") def", "10 If empty: sets hint, grey Calibri Italic 10 \"\"\" instance = main_win.nametowidget(widget_name)", "and version_entry) vcmd = main_win.register(validation_command) ivcmd = main_win.register(invalid_command) serial_entry.config(validatecommand=(vcmd, \"%W\", \"%P\", \"%V\"), invalidcommand=(ivcmd,", "foreground=\"grey\") if trigger_type == \"focusout\": serial_entry.delete(0, tk.END) serial_entry.insert(tk.END, \"xxx\") return tk.TRUE if not", "dialog. We are going to read binary files (.cod). 
So .encode() isn't needed.\"\"\"", "\" \"{}\\n Hardware version: {}\\n\".format(URL.get(), serial_entry.get(), version_entry.get())) log.insert(tk.END, \"Please wait\\n\") main_win.update() res =", "patch = \"\" # minor = \"\" # # major correction major =", "and key.\"\"\" global URL global KEY if URL.get() == \"\": log.insert(tk.END, \"You must", "upd_button, method .state() returns empty tuple upd_button.focus() else: firmware_browse_button.focus() log.insert(tk.END, \"{} is chosen!\\n\".format(combox.get()))", "return tk.TRUE return tk.FALSE def invalid_command(widget_name, content): \"\"\"Starts if validation commands return False.\"\"\"", "= main_win.winfo_width() if DEV_STATE: log_frame.pack_forget() separator.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.X, padx=5) developer_tab.pack_forget() log_frame.pack(expand=tk.TRUE, side=tk.BOTTOM, fill=tk.BOTH)", "expands developer tab\"\"\" global DEV_STATE height = main_win.winfo_height() width = main_win.winfo_width() if DEV_STATE:", "main_win.update() res = epcbootlib.urpc_firmware_update(url, FIRMWARE, len(FIRMWARE)) if res == 0: log.insert(tk.END, \"Ok\\n\") else:", "minor = \"\" # # major correction major = content[second_dot_index + 1:] if", "and trigger_type == \"focusin\": # clears the hint version_entry.delete(0, tk.END) return tk.TRUE if", "used by another program.\\n\") if sys.platform.startswith(\"win\"): URL.set(r\"com:\\\\.\\{}\".format(URL.get())) elif sys.platform.startswith(\"linux\"): URL.set(r\"com://{}\".format(URL.get())) if upd_button.state() ==", "must specify serial number.\\n\") return if version_entry.get() == \"x.x.x\": log.insert(tk.END, \"You must specify", "a number!\\n\") return tk.FALSE serial_entry.config(font=(\"Calibri\", 10), foreground=\"green\") return tk.TRUE def _version_validation(content, trigger_type=\"focusout\"): \"\"\"Returns", "and trigger_type == \"focusout\": # just leave the entry return tk.TRUE if content", "firmware file.\\n\") return error_text = urlparse.validate(URL.get()) if error_text: log.insert(tk.END, error_text) return UPDATE_LOCK.release() def", "is necessary to work with url as C char* url = ctypes.create_string_buffer(URL.get().encode()) key", "update firmware RUNNING = True UPDATE_LOCK = threading.Lock() UPDATE_LOCK.acquire() UPDATE_RUNNING = False thread_upd", "trigger_type): return tk.TRUE return tk.FALSE def invalid_command(widget_name, content): \"\"\"Starts if validation commands return", "side=tk.LEFT, fill=tk.X, padx=14) firmware_browse_button.pack(side=tk.LEFT, padx=5) upd_button.pack(side=tk.TOP, pady=0) # end of firmware tab #", "return tk.TRUE if content == \"xxx\" and trigger_type == \"focusout\": # just leave", "minor.isdigit(): if (minor == \"\" and second_dot_index == -1 and trigger_type != \"focusout\"):", "clears the hint version_entry.delete(0, tk.END) return tk.TRUE if content != \"x.x.x\" and trigger_type", "is not\" \" used by another program.\\n\") if sys.platform.startswith(\"win\"): URL.set(r\"com:\\\\.\\{}\".format(URL.get())) elif sys.platform.startswith(\"linux\"): URL.set(r\"com://{}\".format(URL.get()))", "main_win.title(f\"EPCboot {Version.full}\") FIRMWARE = \"\" # string containing firmware # firmware tab: firmware_tab", "epcboot_gui with \" \"root.\\nIn case of using Windows, make sure that the device", "tk.TRUE if content == \"xxx\" and trigger_type == \"focusout\": # just leave the", "is not found first_dot_index = content.find(\".\", 0) second_dot_index = content.find(\".\", first_dot_index + 1)", "fill=tk.X, anchor=tk.N) 
key_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=4) ident_frame.pack(expand=tk.FALSE, side=tk.TOP, padx=5, fill=tk.BOTH, pady=3, ipady=4)", "wait\\n\") main_win.update() res = epcbootlib.urpc_firmware_update(url, FIRMWARE, len(FIRMWARE)) if res == 0: log.insert(tk.END, \"Ok\\n\")", "error_text = urlparse.validate(URL.get()) if error_text: log.insert(tk.END, error_text) return # The statement below is", "# The statement below is necessary to work with url as C char*", "content[0:first_dot_index] minor = content[first_dot_index + 1:second_dot_index] patch = content[second_dot_index + 1:] if second_dot_index", "Calibri 10 If empty: sets hint, grey Calibri Italic 10 \"\"\" instance =", "log.edit_modified(0) log_button_frame = ttk.LabelFrame(log_frame) log_button = ttk.Button(log_button_frame, text=\"Clean log\", command=clean_log) log_button_frame.pack(side=tk.BOTTOM, fill=tk.X) log_frame.pack(expand=tk.TRUE,", "main_win.nametowidget(widget_name) # getting certain entry if instance is serial_entry: if _serial_validation(content, trigger_type): return", "global DEV_STATE height = main_win.winfo_height() width = main_win.winfo_width() if DEV_STATE: log_frame.pack_forget() separator.pack(expand=tk.TRUE, side=tk.RIGHT,", "fill=tk.X, anchor=tk.N) collapse_frame.pack(expand=tk.FALSE, side=tk.TOP, fill=tk.X, anchor=tk.N) key_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=4) ident_frame.pack(expand=tk.FALSE, side=tk.TOP,", "RUNNING: UPDATE_LOCK.acquire() if not RUNNING: break UPDATE_RUNNING = True # Button is clicked", "tip_com_hin = ToolTip(com_hint) if sys.platform.startswith(\"win\"): com_hint.config(text=\"Input format\", foreground=\"grey\") tip_com_hin.set_text(r\"com:\\\\.\\COMx\") elif sys.platform.startswith(\"linux\"): com_hint.config(text=\"Input format\",", "be .txt file.\"\"\" global KEY main_win.key_file = filedialog.askopenfile( mode=\"r\", initialdir=\"/\", title=\"Select key\", filetypes=((\"Text", "_autoincrement_serial(): global AUTOINCR if AUTOINCR.get(): serial_number = int(serial_entry.get()) serial_entry.delete(0, tk.END) serial_entry.insert(0, str(serial_number +", "filetypes=((\"Firmware file\", \"*.cod\"), (\"All files\", \"*.*\"))) if not isinstance(main_win.firmware, type(None)): # File was", "= True # Button is clicked set_buttons_to_state(tk.DISABLED) # The statement below is necessary", "binary files (.cod). 
So .encode() isn't needed.\"\"\" global FIRM_PATH global FIRMWARE global URL", "== \"focusout\": # just leave the entry return tk.TRUE if content == \"\":", "ttk.Label(firmware_frame, text=\"Firmware:\") FIRM_PATH = tk.StringVar() # path to firmware firmware_entry = ttk.Entry(firmware_frame, textvariable=FIRM_PATH,", "elif sys.platform.startswith(\"linux\"): main_win.geometry(\"640x412\") else: print(\"Unknown system!\") main_win.title(f\"EPCboot {Version.full}\") FIRMWARE = \"\" # string", "serial_entry.delete(0, tk.END) serial_entry.insert(tk.END, \"xxx\") return tk.TRUE if not content.isdigit(): log.insert(tk.END, \"Serial number must", "tkinter import filedialog, font, messagebox, scrolledtext, ttk import serial import serial.tools.list_ports import epcbootlib", "more dots return tk.FALSE major = content[0:first_dot_index] minor = content[first_dot_index + 1:second_dot_index] patch", "grey Calibri Italic 10 \"\"\" instance = main_win.nametowidget(widget_name) # getting certain entry if", "\"\"\"This function breaks an infinite loop in the update stream.\"\"\" global RUNNING global", "elif sys.platform.startswith(\"linux\"): height += (568 - 412) main_win.geometry(f\"{width}x{height}\") def on_modification(event=None): log.see(tk.END) log.edit_modified(0) def", "global URL main_win.firmware = filedialog.askopenfile( mode=\"rb\", initialdir=\"/\", title=\"Select firmware\", filetypes=((\"Firmware file\", \"*.cod\"), (\"All", "\"x.x.x\": log.insert(tk.END, \"You must specify version.\\n\") return error_text = urlparse.validate(URL.get()) if error_text: log.insert(tk.END,", "button firmware_tab.pack(expand=tk.FALSE, side=tk.TOP, fill=tk.X, anchor=tk.N) collapse_frame.pack(expand=tk.FALSE, side=tk.TOP, fill=tk.X, anchor=tk.N) key_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3,", "+ 1)) log.insert(tk.END, \"Serial number incremented.\") else: return def _serial_validation(content, trigger_type): if content", "padx=5, fill=tk.BOTH, pady=3, ipady=4) serial_frame.pack(side=tk.TOP, fill=tk.X, pady=7) version_frame.pack(side=tk.TOP, fill=tk.X) key_label.pack(side=tk.LEFT) key_entry.pack(expand=tk.TRUE, side=tk.LEFT, fill=tk.X,", "If empty: sets hint, grey Calibri Italic 10 \"\"\" instance = main_win.nametowidget(widget_name) #", "pady=3, ipady=6) firmware_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=5) com_label.pack(side=tk.LEFT) combox.pack(side=tk.LEFT, padx=10) com_hint.pack(side=tk.LEFT) firmware_label.pack(side=tk.LEFT) firmware_entry.pack(expand=tk.TRUE,", "wrong! 
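# Note on the update-thread handshake used by the functions above and the
# startup code below: UPDATE_LOCK is created and immediately acquired, so
# update_firmware(), running in a background thread, blocks on
# UPDATE_LOCK.acquire() until the GUI releases the lock. start_update()
# releases it only after the URL and firmware checks pass, which lets one
# pass of the flashing loop run; close_window() releases it and clears
# RUNNING so the worker can leave its loop before the window is destroyed.
# UPDATE_RUNNING guards against closing the window while a flash operation
# is still in progress.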
# Creating main window
main_win = tk.Tk()
# Setting window geometry and title
if sys.platform.startswith("win"):
    main_win.geometry("500x383")
elif sys.platform.startswith("linux"):
    main_win.geometry("640x412")
else:
    print("Unknown system!")
main_win.title(f"EPCboot {Version.full}")

FIRMWARE = ""  # string containing firmware

# firmware tab:
firmware_tab = ttk.Frame(main_win)
com_frame = ttk.Labelframe(firmware_tab, text="COM settings")
com_label = ttk.Label(com_frame, text="COM port:")
URL = tk.StringVar()  # URL of port
combox = ttk.Combobox(com_frame, postcommand=_update_combox, width=15,
                      textvariable=URL)
combox.bind("<<ComboboxSelected>>", com_chosen)
com_hint = ttk.Label(com_frame, font=("Calibri Italic", 10))
underlined_font = font.Font(com_hint, com_hint.cget("font"))
underlined_font.configure(underline=True)
com_hint.configure(font=underlined_font)
tip_com_hin = ToolTip(com_hint)
if sys.platform.startswith("win"):
    com_hint.config(text="Input format", foreground="grey")
    tip_com_hin.set_text(r"com:\\.\COMx")
elif sys.platform.startswith("linux"):
    com_hint.config(text="Input format", foreground="grey")
    tip_com_hin.set_text("com:///dev/ttyUSBx\ncom:///dev/ttyACMx\n"
                         "com:///dev/ttySx")
firmware_frame = ttk.Labelframe(firmware_tab, text="Firmware update")
firmware_label = ttk.Label(firmware_frame, text="Firmware:")
FIRM_PATH = tk.StringVar()  # path to firmware
firmware_entry = ttk.Entry(firmware_frame, textvariable=FIRM_PATH, width=17)
firmware_browse_button = ttk.Button(firmware_frame, text="Browse...",
                                    width=10, command=browse_firmware)
upd_button = ttk.Button(firmware_tab, text="Update firmware",
                        state=tk.DISABLED, width=20, command=start_update)
com_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=6)
firmware_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=5)
com_label.pack(side=tk.LEFT)
combox.pack(side=tk.LEFT, padx=10)
com_hint.pack(side=tk.LEFT)
firmware_label.pack(side=tk.LEFT)
firmware_entry.pack(expand=tk.TRUE, side=tk.LEFT, fill=tk.X, padx=14)
firmware_browse_button.pack(side=tk.LEFT, padx=5)
upd_button.pack(side=tk.TOP, pady=0)
# end of firmware tab

# developer tab:
developer_tab = ttk.Frame(main_win)
key_frame = ttk.Labelframe(developer_tab, text="Key")
ident_frame = ttk.Labelframe(developer_tab, text="Identification")
key_label = ttk.Label(key_frame, text="Key:")
KEY = tk.StringVar()  # cryptographic key
key_entry = ttk.Entry(key_frame, textvariable=KEY)
set_key_button = ttk.Button(key_frame, text=" Set key ", command=set_key)
key_browse_button = ttk.Button(key_frame, text="Browse...", command=key_browse)
left_frame = ttk.Frame(ident_frame)
right_frame = ttk.Frame(ident_frame)
left_frame.pack(expand=tk.TRUE, side=tk.LEFT, fill=tk.BOTH)
right_frame.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.BOTH)
serial_frame = ttk.Frame(left_frame)
serial_label = ttk.Label(serial_frame, text="Serial number:")
serial_entry = ttk.Entry(serial_frame, foreground="grey",
                         font=("Calibri Italic", 10))
serial_entry.insert(tk.END, "xxx")
version_frame = ttk.Frame(left_frame)
version_label = ttk.Label(version_frame, text="HW version:")
version_entry = ttk.Entry(version_frame, foreground="grey",
                          font=("Calibri Italic", 10))
version_entry.insert(tk.END, "x.x.x")
AUTOINCR = tk.BooleanVar()
set_autoincrement_button = ttk.Checkbutton(
    right_frame, text="Auto increment", width=30, variable=AUTOINCR)
set_ident_button = ttk.Button(
    right_frame, text="Set serial and hardware version", width=30,
    command=ident_and_key_set)
set_autoincrement_button.pack(expand=tk.TRUE, side=tk.TOP)
set_ident_button.pack(expand=tk.TRUE, side=tk.BOTTOM)

# Setting validation to entries (serial_entry and version_entry)
vcmd = main_win.register(validation_command)
ivcmd = main_win.register(invalid_command)
serial_entry.config(validatecommand=(vcmd, "%W", "%P", "%V"),
                    invalidcommand=(ivcmd, "%W", "%P"), validate="all")
version_entry.config(validatecommand=(vcmd, "%W", "%P", "%V"),
                     invalidcommand=(ivcmd, "%W", "%P"), validate="all")
# end of developer tab

# creating collapse button
DEV_STATE = False  # developer tab state
collapse_frame = ttk.Frame(main_win)
collapse_button = ttk.Button(collapse_frame, text="Developer mode",
                             command=collapse)
separator = ttk.Separator(collapse_frame, orient="horizontal")
collapse_button.pack(side=tk.LEFT, anchor=tk.NW, padx=5)
separator.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.X, padx=5)
# end of collapse button

firmware_tab.pack(expand=tk.FALSE, side=tk.TOP, fill=tk.X, anchor=tk.N)
collapse_frame.pack(expand=tk.FALSE, side=tk.TOP, fill=tk.X, anchor=tk.N)
key_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=4)
ident_frame.pack(expand=tk.FALSE, side=tk.TOP, padx=5, fill=tk.BOTH, pady=3,
                 ipady=4)
serial_frame.pack(side=tk.TOP, fill=tk.X, pady=7)
version_frame.pack(side=tk.TOP, fill=tk.X)
key_label.pack(side=tk.LEFT)
key_entry.pack(expand=tk.TRUE, side=tk.LEFT, fill=tk.X, padx=15)
set_key_button.pack(side=tk.RIGHT, padx=4)
key_browse_button.pack(side=tk.RIGHT)
serial_label.pack(expand=tk.FALSE, side=tk.LEFT)
serial_entry.pack(side=tk.LEFT, padx=14)
version_label.pack(side=tk.LEFT)
version_entry.pack(side=tk.LEFT, padx=26)
set_ident_button.pack(side=tk.TOP)

# log_frame
log_frame = ttk.Labelframe(main_win, text="Log")
log = scrolledtext.ScrolledText(log_frame, height=8, wrap=tk.WORD)
log.edit_modified(0)
log_button_frame = ttk.LabelFrame(log_frame)
log_button = ttk.Button(log_button_frame, text="Clean log", command=clean_log)
log_button_frame.pack(side=tk.BOTTOM, fill=tk.X)
log_frame.pack(expand=tk.TRUE, side=tk.BOTTOM, fill=tk.BOTH)
log.pack(expand=tk.TRUE, side=tk.BOTTOM, fill=tk.BOTH)
log_button.pack(side=tk.RIGHT)
log.bind("<<Modified>>", on_modification)

# Add a thread to update firmware
RUNNING = True
UPDATE_LOCK = threading.Lock()
UPDATE_LOCK.acquire()
UPDATE_RUNNING = False
thread_upd = threading.Thread(target=update_firmware)
thread_upd.start()
main_win.protocol("WM_DELETE_WINDOW", close_window)
tk.mainloop()
thread_upd.join()
KEY.set(main_win.key_file.read().rstrip()) main_win.key_file.close() def", "specify key.\\n\") return error_text = urlparse.validate(URL.get()) if error_text: log.insert(tk.END, error_text) return # The", "UPDATE_RUNNING = True # Button is clicked set_buttons_to_state(tk.DISABLED) # The statement below is", "collapse_button.pack(side=tk.LEFT, anchor=tk.NW, padx=5) separator.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.X, padx=5) # end of collapse button firmware_tab.pack(expand=tk.FALSE,", "# just leave the entry return tk.TRUE if content == \"\": serial_entry.config(font=(\"Calibri Italic\",", "com_label = ttk.Label(com_frame, text=\"COM port:\") URL = tk.StringVar() # URL of port combox", "(.cod). So .encode() isn't needed.\"\"\" global FIRM_PATH global FIRMWARE global URL main_win.firmware =", "import filedialog, font, messagebox, scrolledtext, ttk import serial import serial.tools.list_ports import epcbootlib import", "!= \"x.x.x\" and trigger_type == \"focusin\": # just enter the entry return tk.TRUE", "tuple upd_button.focus() else: firmware_browse_button.focus() log.insert(tk.END, \"{} is chosen!\\n\".format(combox.get())) def _update_combox(): \"\"\"Updates COM list.\"\"\"", "key_browse_button.config(state=state) set_key_button.config(state=state) set_ident_button.config(state=state) log_button.config(state=state) def update_firmware(): \"\"\"Updates firmware.\"\"\" global FIRMWARE global FIRM_PATH global", "filedialog.askopenfile( mode=\"r\", initialdir=\"/\", title=\"Select key\", filetypes=((\"Text files\", \"*.txt\"), (\"All files\", \"*.*\"))) if not", "0: log.insert(tk.END, \"Ok\\n\") _autoincrement_serial() else: log.insert(tk.END, \"Fail\\n\") def _autoincrement_serial(): global AUTOINCR if AUTOINCR.get():", "Linux, open epcboot_gui with \" \"root.\\nIn case of using Windows, make sure that", "side=tk.BOTTOM) # Setting validation to entries (serial_entry and version_entry) vcmd = main_win.register(validation_command) ivcmd", "just enter the entry return tk.TRUE print(content) if content == \"\": if trigger_type", "padx=10) com_hint.pack(side=tk.LEFT) firmware_label.pack(side=tk.LEFT) firmware_entry.pack(expand=tk.TRUE, side=tk.LEFT, fill=tk.X, padx=14) firmware_browse_button.pack(side=tk.LEFT, padx=5) upd_button.pack(side=tk.TOP, pady=0) # end", "serial.tools.list_ports import epcbootlib import urlparse from tip import ToolTip from version import Version", "sys.platform.startswith(\"linux\"): com_hint.config(text=\"Input format\", foreground=\"grey\") tip_com_hin.set_text(\"com:///dev/ttyUSBx\\ncom:///dev/ttyACMx\\n\" \"com:///dev/ttySx\") firmware_frame = ttk.Labelframe(firmware_tab, text=\"Firmware update\") firmware_label =", "for comport in serial.tools.list_ports.comports()]) def clean_log(): \"\"\"Cleans log.\"\"\" log.delete('1.0', tk.END) def browse_firmware(): \"\"\"Function", "with url as C char* url = ctypes.create_string_buffer(URL.get().encode()) log.insert( tk.END, \"Starting firmware update.", "url as C char* url = ctypes.create_string_buffer(URL.get().encode()) log.insert( tk.END, \"Starting firmware update. 
Port:", "or more dots return tk.FALSE major = content[0:first_dot_index] minor = content[first_dot_index + 1:second_dot_index]", "ttk.Frame(left_frame) serial_label = ttk.Label(serial_frame, text=\"Serial number:\") serial_entry = ttk.Entry(serial_frame, foreground=\"grey\", font=(\"Calibri Italic\", 10))", "fill=tk.BOTH) log_frame.pack(expand=tk.TRUE, side=tk.BOTTOM, fill=tk.BOTH) DEV_STATE = True if sys.platform.startswith(\"win\"): height += 530 -", "developer tab: developer_tab = ttk.Frame(main_win) key_frame = ttk.Labelframe(developer_tab, text=\"Key\") ident_frame = ttk.Labelframe(developer_tab, text=\"Identification\")", "\"com:///dev/ttySx\") firmware_frame = ttk.Labelframe(firmware_tab, text=\"Firmware update\") firmware_label = ttk.Label(firmware_frame, text=\"Firmware:\") FIRM_PATH = tk.StringVar()", "to entries (serial_entry and version_entry) vcmd = main_win.register(validation_command) ivcmd = main_win.register(invalid_command) serial_entry.config(validatecommand=(vcmd, \"%W\",", "We are going to read binary files (.cod). So .encode() isn't needed.\"\"\" global", "if not major.isdigit(): log.insert(tk.END, \"MAJOR should be a number!\\n\") return tk.FALSE if not", "height -= (568 - 412) else: separator.pack_forget() log_frame.pack_forget() developer_tab.pack(expand=tk.FALSE, side=tk.TOP, fill=tk.BOTH) log_frame.pack(expand=tk.TRUE, side=tk.BOTTOM,", "content == \"xxx\" and trigger_type == \"focusin\": # clears the hint serial_entry.delete(0, tk.END)", "read binary files (.cod). So .encode() isn't needed.\"\"\" global FIRM_PATH global FIRMWARE global", "main_win.update() res = epcbootlib.urpc_write_ident(url, key, int(serial_entry.get()), version) if res == 0: log.insert(tk.END, \"Ok\\n\")", "firmware firmware_entry = ttk.Entry(firmware_frame, textvariable=FIRM_PATH, width=17) firmware_browse_button = ttk.Button(firmware_frame, text=\"Browse...\", width=10, command=browse_firmware) upd_button", "\"Serial number must be a number!\\n\") return tk.FALSE serial_entry.config(font=(\"Calibri\", 10), foreground=\"green\") return tk.TRUE", "it to controller (developer only) * update serial and version (developer only) \"\"\"", "number!\\n\") return tk.FALSE if not minor.isdigit(): if (minor == \"\" and second_dot_index ==", "_version_validation(content, trigger_type=\"focusout\"): \"\"\"Returns tk.TRUE if version format is correct\"\"\" if content == \"x.x.x\"", "Calibri 10 If format uncorrect: red Calibri 10 If empty: sets hint, grey", "version and key.\"\"\" global URL global KEY if URL.get() == \"\": log.insert(tk.END, \"You", "format is correct\"\"\" if content == \"x.x.x\" and trigger_type == \"focusin\": # clears", "the device is not\" \" used by another program.\\n\") if sys.platform.startswith(\"win\"): URL.set(r\"com:\\\\.\\{}\".format(URL.get())) elif", "is ok: green Calibri 10 If format uncorrect: red Calibri 10 If empty:", "return error_text = urlparse.validate(URL.get()) if error_text: log.insert(tk.END, error_text) return # Checking serial and", "width=17) firmware_browse_button = ttk.Button(firmware_frame, text=\"Browse...\", width=10, command=browse_firmware) upd_button = ttk.Button(firmware_tab, text=\"Update firmware\", state=tk.DISABLED,", "tab # creating collapse button DEV_STATE = False # developer tab state collapse_frame", "return if KEY.get() == \"\": log.insert(tk.END, \"You must specify key.\\n\") return error_text =", "return if KEY.get() == \"\": log.insert(tk.END, \"You must specify key.\\n\") return if serial_entry.get()", "URL.\\n\") return if KEY.get() 
== \"\": log.insert(tk.END, \"You must specify key.\\n\") return error_text", "left_frame = ttk.Frame(ident_frame) right_frame = ttk.Frame(ident_frame) left_frame.pack(expand=tk.TRUE, side=tk.LEFT, fill=tk.BOTH) right_frame.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.BOTH) serial_frame", "absence if first_dot_index == -1: # second_dot_index, third_dot_index = -1, -1 # dot", "firmware_frame = ttk.Labelframe(firmware_tab, text=\"Firmware update\") firmware_label = ttk.Label(firmware_frame, text=\"Firmware:\") FIRM_PATH = tk.StringVar() #", "patch = \"\" # # minor correction minor = content[first_dot_index + 1:] if", "= ttk.Frame(left_frame) serial_label = ttk.Label(serial_frame, text=\"Serial number:\") serial_entry = ttk.Entry(serial_frame, foreground=\"grey\", font=(\"Calibri Italic\",", "\"xxx\") return tk.TRUE if not content.isdigit(): log.insert(tk.END, \"Serial number must be a number!\\n\")", "serial_entry.config(font=(\"Calibri Italic\", 10), foreground=\"grey\") if trigger_type == \"focusout\": serial_entry.delete(0, tk.END) serial_entry.insert(tk.END, \"xxx\") return", "is clicked set_buttons_to_state(tk.DISABLED) # The statement below is necessary to work with url", "= content[second_dot_index + 1:] if second_dot_index == -1: # patch = \"\" #", "firmware\", state=tk.DISABLED, width=20, command=start_update) com_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=6) firmware_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3,", "com_hint.configure(font=underlined_font) tip_com_hin = ToolTip(com_hint) if sys.platform.startswith(\"win\"): com_hint.config(text=\"Input format\", foreground=\"grey\") tip_com_hin.set_text(r\"com:\\\\.\\COMx\") elif sys.platform.startswith(\"linux\"): com_hint.config(text=\"Input", "to update firmware RUNNING = True UPDATE_LOCK = threading.Lock() UPDATE_LOCK.acquire() UPDATE_RUNNING = False", "# Creating main window main_win = tk.Tk() # Setting window geometry and title", "statement below is necessary to work with url as C char* url =", "if serial_entry.get() == \"xxx\": log.insert(tk.END, \"You must specify serial number.\\n\") return if version_entry.get()", "file dialog. 
Key must be .txt file.\"\"\" global KEY main_win.key_file = filedialog.askopenfile( mode=\"r\",", "serial_entry.config(validatecommand=(vcmd, \"%W\", \"%P\", \"%V\"), invalidcommand=(ivcmd, \"%W\", \"%P\"), validate=\"all\") version_entry.config(validatecommand=(vcmd, \"%W\", \"%P\", \"%V\"), invalidcommand=(ivcmd,", "and version format if not serial_entry.validate() or not version_entry.validate(): return # The statement", "certain entry if instance is serial_entry: if _serial_validation(content, trigger_type): return tk.TRUE if instance", "com_hint.config(text=\"Input format\", foreground=\"grey\") tip_com_hin.set_text(r\"com:\\\\.\\COMx\") elif sys.platform.startswith(\"linux\"): com_hint.config(text=\"Input format\", foreground=\"grey\") tip_com_hin.set_text(\"com:///dev/ttyUSBx\\ncom:///dev/ttyACMx\\n\" \"com:///dev/ttySx\") firmware_frame =", "instance is version_entry: if _version_validation(content, trigger_type): return tk.TRUE return tk.FALSE def invalid_command(widget_name, content):", "# Setting window geometry and title if sys.platform.startswith(\"win\"): main_win.geometry(\"500x383\") elif sys.platform.startswith(\"linux\"): main_win.geometry(\"640x412\") else:", "make sure that the device is not\" \" used by another program.\\n\") if", "main_win.key_file = filedialog.askopenfile( mode=\"r\", initialdir=\"/\", title=\"Select key\", filetypes=((\"Text files\", \"*.txt\"), (\"All files\", \"*.*\")))", "\"You must specify firmware file.\\n\") return error_text = urlparse.validate(URL.get()) if error_text: log.insert(tk.END, error_text)", "def _update_combox(): \"\"\"Updates COM list.\"\"\" combox.config(values=[comport.device for comport in serial.tools.list_ports.comports()]) def clean_log(): \"\"\"Cleans", "must be a number!\\n\") return tk.FALSE serial_entry.config(font=(\"Calibri\", 10), foreground=\"green\") return tk.TRUE def _version_validation(content,", "sys.platform.startswith(\"win\"): height += 530 - 383 elif sys.platform.startswith(\"linux\"): height += (568 - 412)", "= ttk.Button(key_frame, text=\" Set key \", command=set_key) key_browse_button = ttk.Button(key_frame, text=\"Browse...\", command=key_browse) left_frame", "\"focusout\": # just leave the entry return tk.TRUE if content == \"\": serial_entry.config(font=(\"Calibri", "!= \"\" and not patch.isdigit(): log.insert(tk.END, \"PATCH should be a number!\\n\") return tk.FALSE", "handlers def com_chosen(event=None): \"\"\"Sets URL.\"\"\" global URL try: test_port = serial.Serial(port=URL.get()) test_port.close() except", "correct\"\"\" if content == \"x.x.x\" and trigger_type == \"focusin\": # clears the hint", "set_autoincrement_button = ttk.Checkbutton( right_frame, text=\"Auto increment\", width=30, variable=AUTOINCR) set_ident_button = ttk.Button( right_frame, text=\"Set", "res == 0: log.insert(tk.END, \"Ok\\n\") else: log.insert(tk.END, \"Fail\\n\") set_buttons_to_state(tk.NORMAL) UPDATE_RUNNING = False def", "cust \" \"(developer or customer)\") args = parser.parse_args() # Event handlers def com_chosen(event=None):", "version = ctypes.create_string_buffer(version_entry.get().encode()) log.insert( tk.END, \"Starting identificator and key setting. 
Port: {}\\n Serial", "FIRM_PATH = tk.StringVar() # path to firmware firmware_entry = ttk.Entry(firmware_frame, textvariable=FIRM_PATH, width=17) firmware_browse_button", "number:\") serial_entry = ttk.Entry(serial_frame, foreground=\"grey\", font=(\"Calibri Italic\", 10)) serial_entry.insert(tk.END, \"xxx\") version_frame = ttk.Frame(left_frame)", "tk.END) instance.insert(tk.END, content) instance.config(foreground=\"red\") def collapse(): \"\"\"Function collapses or expands developer tab\"\"\" global", "global RUNNING global UPDATE_LOCK global UPDATE_RUNNING if UPDATE_RUNNING: messagebox.showinfo(\"Information\", \"You need to wait", "program.\\n\") if sys.platform.startswith(\"win\"): URL.set(r\"com:\\\\.\\{}\".format(URL.get())) elif sys.platform.startswith(\"linux\"): URL.set(r\"com://{}\".format(URL.get())) if upd_button.state() == (): # in", "URL.set(r\"com://{}\".format(URL.get())) if upd_button.state() == (): # in case of enabled upd_button, method .state()", "should be a number!\\n\") return tk.FALSE if patch != \"\" and not patch.isdigit():", "hint, grey Calibri Italic 10 \"\"\" instance = main_win.nametowidget(widget_name) # getting certain entry", "another program.\\n\") if sys.platform.startswith(\"win\"): URL.set(r\"com:\\\\.\\{}\".format(URL.get())) elif sys.platform.startswith(\"linux\"): URL.set(r\"com://{}\".format(URL.get())) if upd_button.state() == (): #", "entry instance.delete(0, tk.END) instance.insert(tk.END, content) instance.config(foreground=\"red\") def collapse(): \"\"\"Function collapses or expands developer", "collapse_button = ttk.Button(collapse_frame, text=\"Developer mode\", command=collapse) separator = ttk.Separator(collapse_frame, orient=\"horizontal\") collapse_button.pack(side=tk.LEFT, anchor=tk.NW, padx=5)", "padx=5, pady=3, ipady=6) firmware_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=5) com_label.pack(side=tk.LEFT) combox.pack(side=tk.LEFT, padx=10) com_hint.pack(side=tk.LEFT) firmware_label.pack(side=tk.LEFT)", "key, int(serial_entry.get()), version) if res == 0: log.insert(tk.END, \"Ok\\n\") _autoincrement_serial() else: log.insert(tk.END, \"Fail\\n\")", "1) third_dot_index = content.find(\".\", second_dot_index + 1) # Consider -1 as dot absence", "firmware_tab = ttk.Frame(main_win) com_frame = ttk.Labelframe(firmware_tab, text=\"COM settings\") com_label = ttk.Label(com_frame, text=\"COM port:\")", "and title if sys.platform.startswith(\"win\"): main_win.geometry(\"500x383\") elif sys.platform.startswith(\"linux\"): main_win.geometry(\"640x412\") else: print(\"Unknown system!\") main_win.title(f\"EPCboot {Version.full}\")", "\"\": log.insert(tk.END, \"You must specify key.\\n\") return if serial_entry.get() == \"xxx\": log.insert(tk.END, \"You", "\"xxx\": log.insert(tk.END, \"You must specify serial number.\\n\") return if version_entry.get() == \"x.x.x\": log.insert(tk.END,", "tk.FALSE serial_entry.config(font=(\"Calibri\", 10), foreground=\"green\") return tk.TRUE def _version_validation(content, trigger_type=\"focusout\"): \"\"\"Returns tk.TRUE if version", "# clears the hint serial_entry.delete(0, tk.END) return tk.TRUE if content == \"xxx\" and", "width = main_win.winfo_width() if DEV_STATE: log_frame.pack_forget() separator.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.X, padx=5) developer_tab.pack_forget() log_frame.pack(expand=tk.TRUE, side=tk.BOTTOM,", "elif sys.platform.startswith(\"linux\"): height -= (568 - 412) else: separator.pack_forget() log_frame.pack_forget() developer_tab.pack(expand=tk.FALSE, side=tk.TOP, 
fill=tk.BOTH)", "error_text = urlparse.validate(URL.get()) if error_text: log.insert(tk.END, error_text) return UPDATE_LOCK.release() def set_buttons_to_state(state): \"\"\"Function sets", "File was opened KEY.set(main_win.key_file.read().rstrip()) main_win.key_file.close() def set_key(): \"\"\"Function sets cryptographic key.\"\"\" global KEY", "dot absence if first_dot_index == -1: # second_dot_index, third_dot_index = -1, -1 #", "second_dot_index == -1 and trigger_type != \"focusout\"): version_entry.config(font=(\"Calibri\", 10), foreground=\"green\") return tk.TRUE log.insert(tk.END,", "def _autoincrement_serial(): global AUTOINCR if AUTOINCR.get(): serial_number = int(serial_entry.get()) serial_entry.delete(0, tk.END) serial_entry.insert(0, str(serial_number", "\"\"\" instance = main_win.nametowidget(widget_name) # getting certain entry if instance is serial_entry: if", "C char* url = ctypes.create_string_buffer(URL.get().encode()) key = ctypes.create_string_buffer(KEY.get().encode()) log.insert(tk.END, \"Starting key setting. Port:", "to complete\") return UPDATE_LOCK.release() RUNNING = False main_win.destroy() # Creating main window main_win", "= ttk.Label(serial_frame, text=\"Serial number:\") serial_entry = ttk.Entry(serial_frame, foreground=\"grey\", font=(\"Calibri Italic\", 10)) serial_entry.insert(tk.END, \"xxx\")", "key\", filetypes=((\"Text files\", \"*.txt\"), (\"All files\", \"*.*\"))) if not isinstance(main_win.key_file, type(None)): # File", "orient=\"horizontal\") collapse_button.pack(side=tk.LEFT, anchor=tk.NW, padx=5) separator.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.X, padx=5) # end of collapse button", "ttk.Combobox(com_frame, postcommand=_update_combox, width=15, textvariable=URL) combox.bind(\"<<ComboboxSelected>>\", com_chosen) com_hint = ttk.Label(com_frame, font=(\"Calibri Italic\", 10)) underlined_font", "res = epcbootlib.urpc_write_ident(url, key, int(serial_entry.get()), version) if res == 0: log.insert(tk.END, \"Ok\\n\") _autoincrement_serial()", "setting. Port: {}\\n Serial number: \" \"{}\\n Hardware version: {}\\n\".format(URL.get(), serial_entry.get(), version_entry.get())) log.insert(tk.END,", "False if sys.platform.startswith(\"win\"): height -= (530 - 383) elif sys.platform.startswith(\"linux\"): height -= (568", "- 412) main_win.geometry(f\"{width}x{height}\") def on_modification(event=None): log.see(tk.END) log.edit_modified(0) def close_window(): \"\"\"This function breaks an", "major = content[0:first_dot_index] minor = content[first_dot_index + 1:second_dot_index] patch = content[second_dot_index + 1:]", "if second_dot_index == -1: # correction third_dot_index = -1 # if third_dot_index !=", "version_entry.config(validatecommand=(vcmd, \"%W\", \"%P\", \"%V\"), invalidcommand=(ivcmd, \"%W\", \"%P\"), validate=\"all\") # end of developer tab", "= False main_win.destroy() # Creating main window main_win = tk.Tk() # Setting window", "fill=tk.X, padx=14) firmware_browse_button.pack(side=tk.LEFT, padx=5) upd_button.pack(side=tk.TOP, pady=0) # end of firmware tab # developer", "ctypes.create_string_buffer(URL.get().encode()) key = ctypes.create_string_buffer(KEY.get().encode()) log.insert(tk.END, \"Starting key setting. 
Port: {}\\n\".format(URL.get())) log.insert(tk.END, \"Please wait\\n\")", "not content.isdigit(): log.insert(tk.END, \"Serial number must be a number!\\n\") return tk.FALSE serial_entry.config(font=(\"Calibri\", 10),", "version_entry: if _version_validation(content, trigger_type): return tk.TRUE return tk.FALSE def invalid_command(widget_name, content): \"\"\"Starts if", "Italic\", 10)) serial_entry.insert(tk.END, \"xxx\") version_frame = ttk.Frame(left_frame) version_label = ttk.Label(version_frame, text=\"HW version:\") version_entry", "\"xxx\" and trigger_type == \"focusout\": # just leave the entry return tk.TRUE if", "version_entry.insert(tk.END, \"x.x.x\") return tk.TRUE # the .find(\".\", x) returns -1 if \".\" is", "side=tk.BOTTOM, fill=tk.BOTH) DEV_STATE = False if sys.platform.startswith(\"win\"): height -= (530 - 383) elif", "and second_dot_index == -1 and trigger_type != \"focusout\"): version_entry.config(font=(\"Calibri\", 10), foreground=\"green\") return tk.TRUE", "= ttk.Labelframe(developer_tab, text=\"Key\") ident_frame = ttk.Labelframe(developer_tab, text=\"Identification\") key_label = ttk.Label(key_frame, text=\"Key:\") KEY =", "= ttk.Checkbutton( right_frame, text=\"Auto increment\", width=30, variable=AUTOINCR) set_ident_button = ttk.Button( right_frame, text=\"Set serial", "= tk.BooleanVar() set_autoincrement_button = ttk.Checkbutton( right_frame, text=\"Auto increment\", width=30, variable=AUTOINCR) set_ident_button = ttk.Button(", "FIRM_PATH.set(main_win.firmware.name) FIRMWARE = main_win.firmware.read() upd_button.config(state=tk.NORMAL) if URL.get() == \"\": combox.focus() else: upd_button.focus() def", "underlined_font = font.Font(com_hint, com_hint.cget(\"font\")) underlined_font.configure(underline=True) com_hint.configure(font=underlined_font) tip_com_hin = ToolTip(com_hint) if sys.platform.startswith(\"win\"): com_hint.config(text=\"Input format\",", "URL.get() == \"\": log.insert(tk.END, \"You must specify device URL.\\n\") return if FIRMWARE ==", "version.\\n\") return error_text = urlparse.validate(URL.get()) if error_text: log.insert(tk.END, error_text) return # Checking serial", "to all buttons. :param: given state.\"\"\" firmware_browse_button.config(state=state) upd_button.config(state=state) collapse_button.config(state=state) key_browse_button.config(state=state) set_key_button.config(state=state) set_ident_button.config(state=state) log_button.config(state=state)", "update. Port: {}. Firmware file: {}\\n\". 
format( URL.get(), ntpath.basename( FIRM_PATH.get()))) log.insert(tk.END, \"Please wait\\n\")", "fill=tk.X, pady=7) version_frame.pack(side=tk.TOP, fill=tk.X) key_label.pack(side=tk.LEFT) key_entry.pack(expand=tk.TRUE, side=tk.LEFT, fill=tk.X, padx=15) set_key_button.pack(side=tk.RIGHT, padx=4) key_browse_button.pack(side=tk.RIGHT) serial_label.pack(expand=tk.FALSE,", "\"Fail\\n\") def ident_and_key_set(): \"\"\"Sets serial number, hardware version and key.\"\"\" global URL global", "\"\"\"Function sets cryptographic key.\"\"\" global KEY global URL if URL.get() == \"\": log.insert(tk.END,", "= ttk.Combobox(com_frame, postcommand=_update_combox, width=15, textvariable=URL) combox.bind(\"<<ComboboxSelected>>\", com_chosen) com_hint = ttk.Label(com_frame, font=(\"Calibri Italic\", 10))", "com_hint.config(text=\"Input format\", foreground=\"grey\") tip_com_hin.set_text(\"com:///dev/ttyUSBx\\ncom:///dev/ttyACMx\\n\" \"com:///dev/ttySx\") firmware_frame = ttk.Labelframe(firmware_tab, text=\"Firmware update\") firmware_label = ttk.Label(firmware_frame,", "content.find(\".\", first_dot_index + 1) third_dot_index = content.find(\".\", second_dot_index + 1) # Consider -1", "ttk.Button(key_frame, text=\" Set key \", command=set_key) key_browse_button = ttk.Button(key_frame, text=\"Browse...\", command=key_browse) left_frame =", "side=tk.LEFT) serial_entry.pack(side=tk.LEFT, padx=14) version_label.pack(side=tk.LEFT) version_entry.pack(side=tk.LEFT, padx=26) set_ident_button.pack(side=tk.TOP) # log_frame log_frame = ttk.Labelframe(main_win, text=\"Log\")", "with \" \"root.\\nIn case of using Windows, make sure that the device is", "isinstance(main_win.key_file, type(None)): # File was opened KEY.set(main_win.key_file.read().rstrip()) main_win.key_file.close() def set_key(): \"\"\"Function sets cryptographic", "side=tk.RIGHT, fill=tk.BOTH) serial_frame = ttk.Frame(left_frame) serial_label = ttk.Label(serial_frame, text=\"Serial number:\") serial_entry = ttk.Entry(serial_frame,", "\"\"\"Returns tk.TRUE if version format is correct\"\"\" if content == \"x.x.x\" and trigger_type", "allows: * browse firmware on PC and load it to controller * browse", "version format is correct\"\"\" if content == \"x.x.x\" and trigger_type == \"focusin\": #", "font.Font(com_hint, com_hint.cget(\"font\")) underlined_font.configure(underline=True) com_hint.configure(font=underlined_font) tip_com_hin = ToolTip(com_hint) if sys.platform.startswith(\"win\"): com_hint.config(text=\"Input format\", foreground=\"grey\") tip_com_hin.set_text(r\"com:\\\\.\\COMx\")", "log.\"\"\" log.delete('1.0', tk.END) def browse_firmware(): \"\"\"Function opens file dialog. We are going to", "changes font If format is ok: green Calibri 10 If format uncorrect: red", "controller * browse key file (*.txt) and load it to controller (developer only)", "Hardware version: {}\\n\".format(URL.get(), serial_entry.get(), version_entry.get())) log.insert(tk.END, \"Please wait\\n\") main_win.update() res = epcbootlib.urpc_write_ident(url, key,", "firmware update.\"\"\" global UPDATE_LOCK if URL.get() == \"\": log.insert(tk.END, \"You must specify device", "files\", \"*.txt\"), (\"All files\", \"*.*\"))) if not isinstance(main_win.key_file, type(None)): # File was opened", "\"Fail\\n\") set_buttons_to_state(tk.NORMAL) UPDATE_RUNNING = False def key_browse(): \"\"\"Opens file dialog. 
Key must be", "== -1: # patch = \"\" # minor = \"\" # # major", "or expands developer tab\"\"\" global DEV_STATE height = main_win.winfo_height() width = main_win.winfo_width() if", "log.insert(tk.END, \"Ok\\n\") _autoincrement_serial() else: log.insert(tk.END, \"Fail\\n\") def _autoincrement_serial(): global AUTOINCR if AUTOINCR.get(): serial_number", "if _version_validation(content, trigger_type): return tk.TRUE return tk.FALSE def invalid_command(widget_name, content): \"\"\"Starts if validation", "\"Ok\\n\") else: log.insert(tk.END, \"Fail\\n\") def ident_and_key_set(): \"\"\"Sets serial number, hardware version and key.\"\"\"", "ttk.Labelframe(main_win, text=\"Log\") log = scrolledtext.ScrolledText(log_frame, height=8, wrap=tk.WORD) log.edit_modified(0) log_button_frame = ttk.LabelFrame(log_frame) log_button =", "is necessary to work with url as C char* url = ctypes.create_string_buffer(URL.get().encode()) log.insert(", "= ToolTip(com_hint) if sys.platform.startswith(\"win\"): com_hint.config(text=\"Input format\", foreground=\"grey\") tip_com_hin.set_text(r\"com:\\\\.\\COMx\") elif sys.platform.startswith(\"linux\"): com_hint.config(text=\"Input format\", foreground=\"grey\")", "log\", command=clean_log) log_button_frame.pack(side=tk.BOTTOM, fill=tk.X) log_frame.pack(expand=tk.TRUE, side=tk.BOTTOM, fill=tk.BOTH) log.pack(expand=tk.TRUE, side=tk.BOTTOM, fill=tk.BOTH) log_button.pack(side=tk.RIGHT) log.bind(\"<<Modified>>\", on_modification)", "log.edit_modified(0) def close_window(): \"\"\"This function breaks an infinite loop in the update stream.\"\"\"", "if content == \"\": serial_entry.config(font=(\"Calibri Italic\", 10), foreground=\"grey\") if trigger_type == \"focusout\": serial_entry.delete(0,", "filedialog.askopenfile( mode=\"rb\", initialdir=\"/\", title=\"Select firmware\", filetypes=((\"Firmware file\", \"*.cod\"), (\"All files\", \"*.*\"))) if not", "key setting. Port: {}\\n\".format(URL.get())) log.insert(tk.END, \"Please wait\\n\") main_win.update() res = epcbootlib.urpc_write_key(url, key) if", "loop in the update stream.\"\"\" global RUNNING global UPDATE_LOCK global UPDATE_RUNNING if UPDATE_RUNNING:", "10)) serial_entry.insert(tk.END, \"xxx\") version_frame = ttk.Frame(left_frame) version_label = ttk.Label(version_frame, text=\"HW version:\") version_entry =", "if sys.platform.startswith(\"win\"): com_hint.config(text=\"Input format\", foreground=\"grey\") tip_com_hin.set_text(r\"com:\\\\.\\COMx\") elif sys.platform.startswith(\"linux\"): com_hint.config(text=\"Input format\", foreground=\"grey\") tip_com_hin.set_text(\"com:///dev/ttyUSBx\\ncom:///dev/ttyACMx\\n\" \"com:///dev/ttySx\")", "error_text: log.insert(tk.END, error_text) return UPDATE_LOCK.release() def set_buttons_to_state(state): \"\"\"Function sets the given state to", "foreground=\"grey\", font=(\"Calibri Italic\", 10)) version_entry.insert(tk.END, \"x.x.x\") AUTOINCR = tk.BooleanVar() set_autoincrement_button = ttk.Checkbutton( right_frame,", "URL.get() == \"\": combox.focus() else: upd_button.focus() def start_update(): \"\"\"Function starts the firmware update.\"\"\"", "fill=tk.BOTH) serial_frame = ttk.Frame(left_frame) serial_label = ttk.Label(serial_frame, text=\"Serial number:\") serial_entry = ttk.Entry(serial_frame, foreground=\"grey\",", "log.insert(tk.END, \"Serial number incremented.\") else: return def _serial_validation(content, trigger_type): if content == \"xxx\"", "url = ctypes.create_string_buffer(URL.get().encode()) log.insert( tk.END, \"Starting firmware update. Port: {}. 
Firmware file: {}\\n\".", "com_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=6) firmware_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=5) com_label.pack(side=tk.LEFT) combox.pack(side=tk.LEFT, padx=10)", "DEV_STATE: log_frame.pack_forget() separator.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.X, padx=5) developer_tab.pack_forget() log_frame.pack(expand=tk.TRUE, side=tk.BOTTOM, fill=tk.BOTH) DEV_STATE = False", "serial_entry.pack(side=tk.LEFT, padx=14) version_label.pack(side=tk.LEFT) version_entry.pack(side=tk.LEFT, padx=26) set_ident_button.pack(side=tk.TOP) # log_frame log_frame = ttk.Labelframe(main_win, text=\"Log\") log", "path to firmware firmware_entry = ttk.Entry(firmware_frame, textvariable=FIRM_PATH, width=17) firmware_browse_button = ttk.Button(firmware_frame, text=\"Browse...\", width=10,", "\"Please wait\\n\") main_win.update() res = epcbootlib.urpc_write_ident(url, key, int(serial_entry.get()), version) if res == 0:", "load it to controller (developer only) * update serial and version (developer only)", "\", command=set_key) key_browse_button = ttk.Button(key_frame, text=\"Browse...\", command=key_browse) left_frame = ttk.Frame(ident_frame) right_frame = ttk.Frame(ident_frame)", "= main_win.register(validation_command) ivcmd = main_win.register(invalid_command) serial_entry.config(validatecommand=(vcmd, \"%W\", \"%P\", \"%V\"), invalidcommand=(ivcmd, \"%W\", \"%P\"), validate=\"all\")", "args = parser.parse_args() # Event handlers def com_chosen(event=None): \"\"\"Sets URL.\"\"\" global URL try:", "# if third_dot_index != -1: # there cannot be three or more dots", "content[second_dot_index + 1:] if not major.isdigit(): log.insert(tk.END, \"MAJOR should be a number!\\n\") return", "# developer tab state collapse_frame = ttk.Frame(main_win) collapse_button = ttk.Button(collapse_frame, text=\"Developer mode\", command=collapse)", "cryptographic key key_entry = ttk.Entry(key_frame, textvariable=KEY) set_key_button = ttk.Button(key_frame, text=\" Set key \",", "KEY.get() == \"\": log.insert(tk.END, \"You must specify key.\\n\") return error_text = urlparse.validate(URL.get()) if", "error_text: log.insert(tk.END, error_text) return # Checking serial and version format if not serial_entry.validate()", "char* url = ctypes.create_string_buffer(URL.get().encode()) key = ctypes.create_string_buffer(KEY.get().encode()) version = ctypes.create_string_buffer(version_entry.get().encode()) log.insert( tk.END, \"Starting", "version:\") version_entry = ttk.Entry(version_frame, foreground=\"grey\", font=(\"Calibri Italic\", 10)) version_entry.insert(tk.END, \"x.x.x\") AUTOINCR = tk.BooleanVar()", "import ToolTip from version import Version parser = argparse.ArgumentParser() parser.add_argument(\"-m\", \"--method\", choices=(\"dev\", \"cust\"),", "files (.cod). 
So .encode() isn't needed.\"\"\" global FIRM_PATH global FIRMWARE global URL main_win.firmware", "tk.FALSE if not minor.isdigit(): if (minor == \"\" and second_dot_index == -1 and", "URL.set(r\"com:\\\\.\\{}\".format(URL.get())) elif sys.platform.startswith(\"linux\"): URL.set(r\"com://{}\".format(URL.get())) if upd_button.state() == (): # in case of enabled", "1:] if not major.isdigit(): log.insert(tk.END, \"MAJOR should be a number!\\n\") return tk.FALSE if", "text=\" Set key \", command=set_key) key_browse_button = ttk.Button(key_frame, text=\"Browse...\", command=key_browse) left_frame = ttk.Frame(ident_frame)", "device URL.\\n\") return if FIRMWARE == \"\": log.insert(tk.END, \"You must specify firmware file.\\n\")", "instance = main_win.nametowidget(widget_name) # getting certain entry if instance is serial_entry: if _serial_validation(content,", "len(FIRMWARE)) if res == 0: log.insert(tk.END, \"Ok\\n\") else: log.insert(tk.END, \"Fail\\n\") set_buttons_to_state(tk.NORMAL) UPDATE_RUNNING =", "{}\\n Serial number: \" \"{}\\n Hardware version: {}\\n\".format(URL.get(), serial_entry.get(), version_entry.get())) log.insert(tk.END, \"Please wait\\n\")", "= content[second_dot_index + 1:] if not major.isdigit(): log.insert(tk.END, \"MAJOR should be a number!\\n\")", "set_buttons_to_state(tk.NORMAL) UPDATE_RUNNING = False def key_browse(): \"\"\"Opens file dialog. Key must be .txt", "COM list.\"\"\" combox.config(values=[comport.device for comport in serial.tools.list_ports.comports()]) def clean_log(): \"\"\"Cleans log.\"\"\" log.delete('1.0', tk.END)", "ctypes import ntpath import sys import threading import tkinter as tk from tkinter", "FIRM_PATH global RUNNING global UPDATE_LOCK global UPDATE_RUNNING global URL while RUNNING: UPDATE_LOCK.acquire() if", "threading import tkinter as tk from tkinter import filedialog, font, messagebox, scrolledtext, ttk", "content[second_dot_index + 1:] if second_dot_index == -1: # patch = \"\" # #", "ttk.Labelframe(developer_tab, text=\"Identification\") key_label = ttk.Label(key_frame, text=\"Key:\") KEY = tk.StringVar() # cryptographic key key_entry", "validate=\"all\") # end of developer tab # creating collapse button DEV_STATE = False", "# just enter the entry return tk.TRUE print(content) if content == \"\": if", "main_win.nametowidget(widget_name) # getting certain entry instance.delete(0, tk.END) instance.insert(tk.END, content) instance.config(foreground=\"red\") def collapse(): \"\"\"Function", "# end of firmware tab # developer tab: developer_tab = ttk.Frame(main_win) key_frame =", "patch = content[second_dot_index + 1:] if second_dot_index == -1: # patch = \"\"", "global UPDATE_RUNNING if UPDATE_RUNNING: messagebox.showinfo(\"Information\", \"You need to wait for the update to", "version_entry.pack(side=tk.LEFT, padx=26) set_ident_button.pack(side=tk.TOP) # log_frame log_frame = ttk.Labelframe(main_win, text=\"Log\") log = scrolledtext.ScrolledText(log_frame, height=8,", "version import Version parser = argparse.ArgumentParser() parser.add_argument(\"-m\", \"--method\", choices=(\"dev\", \"cust\"), help=\"Choosing method: dev", "\"%P\"), validate=\"all\") # end of developer tab # creating collapse button DEV_STATE =", "530 - 383 elif sys.platform.startswith(\"linux\"): height += (568 - 412) main_win.geometry(f\"{width}x{height}\") def on_modification(event=None):", "main_win.register(invalid_command) serial_entry.config(validatecommand=(vcmd, \"%W\", \"%P\", \"%V\"), invalidcommand=(ivcmd, \"%W\", \"%P\"), validate=\"all\") 
version_entry.config(validatecommand=(vcmd, \"%W\", \"%P\", \"%V\"),", "== \"\": log.insert(tk.END, \"You must specify key.\\n\") return if serial_entry.get() == \"xxx\": log.insert(tk.END,", ".state() returns empty tuple upd_button.focus() else: firmware_browse_button.focus() log.insert(tk.END, \"{} is chosen!\\n\".format(combox.get())) def _update_combox():", "-1: # correction third_dot_index = -1 # if third_dot_index != -1: # there", "== \"\": log.insert(tk.END, \"You must specify firmware file.\\n\") return error_text = urlparse.validate(URL.get()) if", "= ctypes.create_string_buffer(URL.get().encode()) log.insert( tk.END, \"Starting firmware update. Port: {}. Firmware file: {}\\n\". format(", "\" used by another program.\\n\") if sys.platform.startswith(\"win\"): URL.set(r\"com:\\\\.\\{}\".format(URL.get())) elif sys.platform.startswith(\"linux\"): URL.set(r\"com://{}\".format(URL.get())) if upd_button.state()", "elif sys.platform.startswith(\"linux\"): com_hint.config(text=\"Input format\", foreground=\"grey\") tip_com_hin.set_text(\"com:///dev/ttyUSBx\\ncom:///dev/ttyACMx\\n\" \"com:///dev/ttySx\") firmware_frame = ttk.Labelframe(firmware_tab, text=\"Firmware update\") firmware_label", "key_label.pack(side=tk.LEFT) key_entry.pack(expand=tk.TRUE, side=tk.LEFT, fill=tk.X, padx=15) set_key_button.pack(side=tk.RIGHT, padx=4) key_browse_button.pack(side=tk.RIGHT) serial_label.pack(expand=tk.FALSE, side=tk.LEFT) serial_entry.pack(side=tk.LEFT, padx=14) version_label.pack(side=tk.LEFT)", "\".\" is not found first_dot_index = content.find(\".\", 0) second_dot_index = content.find(\".\", first_dot_index +", "collapse button firmware_tab.pack(expand=tk.FALSE, side=tk.TOP, fill=tk.X, anchor=tk.N) collapse_frame.pack(expand=tk.FALSE, side=tk.TOP, fill=tk.X, anchor=tk.N) key_frame.pack(side=tk.TOP, fill=tk.X, padx=5,", "fill=tk.X, padx=5, pady=3, ipady=6) firmware_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=5) com_label.pack(side=tk.LEFT) combox.pack(side=tk.LEFT, padx=10) com_hint.pack(side=tk.LEFT)", "global UPDATE_LOCK if URL.get() == \"\": log.insert(tk.END, \"You must specify device URL.\\n\") return", "invalidcommand=(ivcmd, \"%W\", \"%P\"), validate=\"all\") # end of developer tab # creating collapse button", "except serial.SerialException: log.insert( tk.END, \"Something is wrong! 
If you use Linux, open epcboot_gui", "choices=(\"dev\", \"cust\"), help=\"Choosing method: dev or cust \" \"(developer or customer)\") args =", "RUNNING global UPDATE_LOCK global UPDATE_RUNNING global URL while RUNNING: UPDATE_LOCK.acquire() if not RUNNING:", "== \"x.x.x\": log.insert(tk.END, \"You must specify version.\\n\") return error_text = urlparse.validate(URL.get()) if error_text:", "= epcbootlib.urpc_write_key(url, key) if res == 0: log.insert(tk.END, \"Ok\\n\") else: log.insert(tk.END, \"Fail\\n\") def", "tk.END) return tk.TRUE if content != \"x.x.x\" and trigger_type == \"focusin\": # just", "set_autoincrement_button.pack(expand=tk.TRUE, side=tk.TOP) set_ident_button.pack(expand=tk.TRUE, side=tk.BOTTOM) # Setting validation to entries (serial_entry and version_entry) vcmd", "serial_entry.delete(0, tk.END) return tk.TRUE if content == \"xxx\" and trigger_type == \"focusout\": #", "Add a thread to update firmware RUNNING = True UPDATE_LOCK = threading.Lock() UPDATE_LOCK.acquire()", "if (minor == \"\" and second_dot_index == -1 and trigger_type != \"focusout\"): version_entry.config(font=(\"Calibri\",", "width=20, command=start_update) com_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=6) firmware_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=5) com_label.pack(side=tk.LEFT)", "log_frame.pack(expand=tk.TRUE, side=tk.BOTTOM, fill=tk.BOTH) DEV_STATE = True if sys.platform.startswith(\"win\"): height += 530 - 383", "sys import threading import tkinter as tk from tkinter import filedialog, font, messagebox,", "= main_win.winfo_height() width = main_win.winfo_width() if DEV_STATE: log_frame.pack_forget() separator.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.X, padx=5) developer_tab.pack_forget()", "fill=tk.X, padx=5, pady=3, ipady=5) com_label.pack(side=tk.LEFT) combox.pack(side=tk.LEFT, padx=10) com_hint.pack(side=tk.LEFT) firmware_label.pack(side=tk.LEFT) firmware_entry.pack(expand=tk.TRUE, side=tk.LEFT, fill=tk.X, padx=14)", "right_frame, text=\"Auto increment\", width=30, variable=AUTOINCR) set_ident_button = ttk.Button( right_frame, text=\"Set serial and hardware", "sys.platform.startswith(\"linux\"): main_win.geometry(\"640x412\") else: print(\"Unknown system!\") main_win.title(f\"EPCboot {Version.full}\") FIRMWARE = \"\" # string containing", "empty: sets hint, grey Calibri Italic 10 \"\"\" instance = main_win.nametowidget(widget_name) # getting", "if not RUNNING: break UPDATE_RUNNING = True # Button is clicked set_buttons_to_state(tk.DISABLED) #", "enter the entry return tk.TRUE print(content) if content == \"\": if trigger_type ==", "else: log.insert(tk.END, \"Fail\\n\") def _autoincrement_serial(): global AUTOINCR if AUTOINCR.get(): serial_number = int(serial_entry.get()) serial_entry.delete(0,", "developer_tab.pack(expand=tk.FALSE, side=tk.TOP, fill=tk.BOTH) log_frame.pack(expand=tk.TRUE, side=tk.BOTTOM, fill=tk.BOTH) DEV_STATE = True if sys.platform.startswith(\"win\"): height +=", "update_firmware(): \"\"\"Updates firmware.\"\"\" global FIRMWARE global FIRM_PATH global RUNNING global UPDATE_LOCK global UPDATE_RUNNING", "ttk.Entry(serial_frame, foreground=\"grey\", font=(\"Calibri Italic\", 10)) serial_entry.insert(tk.END, \"xxx\") version_frame = ttk.Frame(left_frame) version_label = ttk.Label(version_frame,", "com_hint.cget(\"font\")) underlined_font.configure(underline=True) com_hint.configure(font=underlined_font) tip_com_hin = ToolTip(com_hint) if sys.platform.startswith(\"win\"): com_hint.config(text=\"Input format\", foreground=\"grey\") 
tip_com_hin.set_text(r\"com:\\\\.\\COMx\") elif", "= filedialog.askopenfile( mode=\"r\", initialdir=\"/\", title=\"Select key\", filetypes=((\"Text files\", \"*.txt\"), (\"All files\", \"*.*\"))) if", "# Checking serial and version format if not serial_entry.validate() or not version_entry.validate(): return", "developer tab\"\"\" global DEV_STATE height = main_win.winfo_height() width = main_win.winfo_width() if DEV_STATE: log_frame.pack_forget()", "entry return tk.TRUE if content == \"\": serial_entry.config(font=(\"Calibri Italic\", 10), foreground=\"grey\") if trigger_type", "Italic\", 10)) version_entry.insert(tk.END, \"x.x.x\") AUTOINCR = tk.BooleanVar() set_autoincrement_button = ttk.Checkbutton( right_frame, text=\"Auto increment\",", "EPCboot. It allows: * browse firmware on PC and load it to controller", "if not serial_entry.validate() or not version_entry.validate(): return # The statement below is necessary", "content[first_dot_index + 1:] if first_dot_index == -1: # patch = \"\" # minor", "URL main_win.firmware = filedialog.askopenfile( mode=\"rb\", initialdir=\"/\", title=\"Select firmware\", filetypes=((\"Firmware file\", \"*.cod\"), (\"All files\",", "creating collapse button DEV_STATE = False # developer tab state collapse_frame = ttk.Frame(main_win)", "third_dot_index = -1 # if third_dot_index != -1: # there cannot be three", "import epcbootlib import urlparse from tip import ToolTip from version import Version parser", "state=tk.DISABLED, width=20, command=start_update) com_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=6) firmware_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=5)", "10), foreground=\"green\") return tk.TRUE def validation_command(widget_name, content, trigger_type): \"\"\"Checks entry format and changes", "with url as C char* url = ctypes.create_string_buffer(URL.get().encode()) key = ctypes.create_string_buffer(KEY.get().encode()) version =", "set_ident_button = ttk.Button( right_frame, text=\"Set serial and hardware version\", width=30, command=ident_and_key_set) set_autoincrement_button.pack(expand=tk.TRUE, side=tk.TOP)", "serial_frame.pack(side=tk.TOP, fill=tk.X, pady=7) version_frame.pack(side=tk.TOP, fill=tk.X) key_label.pack(side=tk.LEFT) key_entry.pack(expand=tk.TRUE, side=tk.LEFT, fill=tk.X, padx=15) set_key_button.pack(side=tk.RIGHT, padx=4) key_browse_button.pack(side=tk.RIGHT)", "RUNNING = False main_win.destroy() # Creating main window main_win = tk.Tk() # Setting", "URL if URL.get() == \"\": log.insert(tk.END, \"You must specify device URL.\\n\") return if", "correction minor = content[first_dot_index + 1:] if first_dot_index == -1: # patch =", "the firmware update.\"\"\" global UPDATE_LOCK if URL.get() == \"\": log.insert(tk.END, \"You must specify", "(530 - 383) elif sys.platform.startswith(\"linux\"): height -= (568 - 412) else: separator.pack_forget() log_frame.pack_forget()", "text=\"Identification\") key_label = ttk.Label(key_frame, text=\"Key:\") KEY = tk.StringVar() # cryptographic key key_entry =", "\"Something is wrong! 
epcboot_gui is a Tkinter front end for EPCboot. Its module docstring reads:

"""Module offers GUI for EPCboot.
It allows:
* browse firmware on PC and load it to controller
* browse key file (*.txt) and load it to controller (developer only)
* update serial and version (developer only)
"""

The script imports argparse, ctypes, ntpath, sys, threading and tkinter (tk plus filedialog, font, messagebox, scrolledtext and ttk), together with serial, serial.tools.list_ports, epcbootlib, urlparse, ToolTip from tip and Version from version. A single command-line option selects the mode:

parser = argparse.ArgumentParser()
parser.add_argument("-m", "--method", choices=("dev", "cust"),
                    help="Choosing method: dev or cust (developer or customer)")
args = parser.parse_args()
The event handlers defined after argument parsing:

* com_chosen(event=None) — "Sets URL." It opens the selected port with serial.Serial(port=URL.get()) and closes it again; on serial.SerialException it logs "Something is wrong! If you use Linux, open epcboot_gui with root. In case of using Windows, make sure that the device is not used by another program." The raw port name is then rewritten to a URL, r"com:\\.\{}" on Windows or r"com://{}" on Linux. Focus moves to upd_button if it is enabled (its .state() returns an empty tuple), otherwise to firmware_browse_button, and "{} is chosen!" is logged.
* _update_combox() — "Updates COM list": combox.config(values=[comport.device for comport in serial.tools.list_ports.comports()]).
* clean_log() — clears the log with log.delete('1.0', tk.END).
* browse_firmware() — opens a file dialog (mode="rb", filetypes "*.cod" / "*.*"); if a file was opened, FIRM_PATH is set to its name, FIRMWARE to its bytes, upd_button is enabled, and focus goes to combox when URL is still empty, otherwise to upd_button.
* start_update() — checks that a device URL and a firmware file are specified and that urlparse.validate(URL.get()) reports no error, then releases UPDATE_LOCK to wake the update thread.
* set_buttons_to_state(state) — applies the given state to firmware_browse_button, upd_button, collapse_button, key_browse_button, set_key_button, set_ident_button and log_button.
* update_firmware() — the worker-thread body: while RUNNING it acquires UPDATE_LOCK, disables the buttons, converts the URL to a C char* with ctypes.create_string_buffer(URL.get().encode()), logs "Starting firmware update. Port: {}. Firmware file: {}" and "Please wait", calls epcbootlib.urpc_firmware_update(url, FIRMWARE, len(FIRMWARE)), logs "Ok" or "Fail" depending on the result, re-enables the buttons and clears UPDATE_RUNNING. The hand-off between start_update and this thread is sketched right after this list.
* key_browse() and set_key() — browse a *.txt key file into KEY (read and rstrip), then, after the same URL/key checks and URL validation, write it with epcbootlib.urpc_write_key(url, key) and log "Ok"/"Fail".
* ident_and_key_set() — checks URL, key, serial number (hint "xxx") and hardware version (hint "x.x.x"), validates both entries, logs "Starting identificator and key setting. Port: {} Serial number: {} Hardware version: {}", then calls epcbootlib.urpc_write_ident(url, key, int(serial_entry.get()), version); on success it logs "Ok" and calls _autoincrement_serial(), which bumps the serial number when the "Auto increment" checkbox (AUTOINCR) is set, otherwise it logs "Fail".
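start_update() and update_firmware() form a simple hand-off: the GUI thread only releases a lock, and a long-lived worker thread does the blocking call, so the Tk event loop is never tied up for the whole flash. Below is a minimal sketch of that pattern, assuming a placeholder do_work() in place of epcbootlib and generic widget names (both are illustrative, not taken from the original):

import threading
import tkinter as tk

RUNNING = True
UPDATE_RUNNING = False
UPDATE_LOCK = threading.Lock()
UPDATE_LOCK.acquire()                      # start locked: the worker sleeps until start() releases it

def do_work():
    pass                                   # placeholder for the blocking call (epcbootlib in the original)

def worker():
    global UPDATE_RUNNING
    while RUNNING:
        UPDATE_LOCK.acquire()              # block until the button hands over a job
        if not RUNNING:
            break
        UPDATE_RUNNING = True
        start_button.config(state=tk.DISABLED)   # same approach as the original: widgets touched from the worker
        do_work()
        start_button.config(state=tk.NORMAL)
        UPDATE_RUNNING = False

def start():
    UPDATE_LOCK.release()                  # wake the worker; a plain Lock may be released by any thread

def close():
    global RUNNING
    if UPDATE_RUNNING:
        return                             # the original shows "You need to wait for the update to complete"
    RUNNING = False
    UPDATE_LOCK.release()                  # let the worker re-check RUNNING and exit
    root.destroy()

root = tk.Tk()
start_button = tk.Button(root, text="Start", command=start)
start_button.pack(padx=10, pady=10)
threading.Thread(target=worker).start()
root.protocol("WM_DELETE_WINDOW", close)
root.mainloop()

Keeping the worker alive and parked on the lock avoids spawning a new thread per update and gives the close handler one well-defined place to stop it.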
Entry validation and window management:

* _serial_validation(content, trigger_type) — clears the "xxx" hint on focus-in, restores it when the entry is left empty on focus-out, rejects non-digit input with "Serial number must be a number!", and colours valid input green ("Calibri", 10).
* _version_validation(content, trigger_type="focusout") — handles the "x.x.x" hint the same way, locates the dots with .find() (an index of -1 meaning the dot is absent, with more than two dots rejected), and requires MAJOR, MINOR and PATCH to be numbers; failures log "MAJOR/MINOR/PATCH should be a number!" and return tk.FALSE, otherwise the entry is coloured green.
* validation_command(widget_name, content, trigger_type) — resolves the widget with main_win.nametowidget(widget_name) and dispatches to the serial or version validator. Its docstring: format ok, green Calibri 10; format incorrect, red Calibri 10; empty, hint in grey Calibri Italic 10. The Tk-side wiring of these callbacks is sketched below.
* invalid_command(widget_name, content) — runs when validation returns False: it re-inserts the content and colours the entry red.
* collapse() — collapses or expands the developer tab by repacking separator, developer_tab and log_frame, and adjusts the window height by the platform-specific difference (530-383 on Windows, 568-412 on Linux) before calling main_win.geometry(f"{width}x{height}").
* on_modification(event=None) — scrolls the log to the end and resets its modified flag.
* close_window() — breaks the infinite loop in the update thread: if an update is running it shows the message box "You need to wait for the update to complete" and returns; otherwise it releases UPDATE_LOCK, sets RUNNING = False and destroys the window.
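The validators reach the entries through Tk's validation protocol: the Python callbacks are registered with main_win.register() and receive the percent substitutions %W (widget path), %P (proposed value) and %V (what triggered the check). A self-contained sketch of that wiring, using a plain tk.Entry and an illustrative digits-only rule instead of the serial/version logic:

import tkinter as tk

root = tk.Tk()

def validation_command(widget_name, content, trigger_type):
    # %W -> widget path, %P -> value the entry would contain, %V -> "key"/"focusin"/"focusout"
    entry = root.nametowidget(widget_name)
    if content == "" or content.isdigit():   # illustrative rule, not the original serial/version checks
        entry.config(foreground="green")
        return True
    return False                              # a False result makes Tk call the invalidcommand

def invalid_command(widget_name, content):
    root.nametowidget(widget_name).config(foreground="red")

vcmd = root.register(validation_command)
ivcmd = root.register(invalid_command)

entry = tk.Entry(root)
entry.config(validatecommand=(vcmd, "%W", "%P", "%V"),
             invalidcommand=(ivcmd, "%W", "%P"),
             validate="all")
entry.pack(padx=10, pady=10)
root.mainloop()

validate="all" runs the callback on keystrokes and on focus changes, which is what lets the original swap hints in and out on focus-in/focus-out.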
Main window and layout:

* main_win = tk.Tk() with geometry "500x383" on Windows or "640x412" on Linux (otherwise "Unknown system!" is printed), title f"EPCboot {Version.full}", and FIRMWARE initialised to "".
* Firmware tab: a "COM settings" Labelframe with a "COM port:" label, a Combobox bound to the URL StringVar (postcommand=_update_combox, <<ComboboxSelected>> bound to com_chosen) and an underlined "Input format" hint whose ToolTip shows r"com:\\.\COMx" on Windows or "com:///dev/ttyUSBx", "com:///dev/ttyACMx", "com:///dev/ttySx" on Linux; a "Firmware update" Labelframe with the FIRM_PATH entry and a "Browse..." button; and an initially disabled "Update firmware" button wired to start_update.
* Developer tab: a "Key" Labelframe (KEY entry, " Set key " and "Browse..." buttons) and an "Identification" Labelframe with the "Serial number:" entry (grey italic hint "xxx"), the "HW version:" entry (hint "x.x.x"), the "Auto increment" checkbutton bound to AUTOINCR, and the "Set serial and hardware version" button wired to ident_and_key_set. validation_command and invalid_command are registered with main_win.register() and attached to both entries via validatecommand=(vcmd, "%W", "%P", "%V") and invalidcommand=(ivcmd, "%W", "%P") with validate="all".
* A collapse frame holding the "Developer mode" button (command=collapse) and a horizontal ttk.Separator; DEV_STATE starts as False.
* A "Log" Labelframe containing a ScrolledText widget (height=8, wrap=tk.WORD) with a "Clean log" button; the log is bound to <<Modified>> so on_modification keeps it scrolled to the end.
* Startup: RUNNING = True; UPDATE_LOCK = threading.Lock(); UPDATE_LOCK.acquire(); UPDATE_RUNNING = False; thread_upd = threading.Thread(target=update_firmware); thread_upd.start(); main_win.protocol("WM_DELETE_WINDOW", close_window).
\"You need", "work with url as C char* url = ctypes.create_string_buffer(URL.get().encode()) log.insert( tk.END, \"Starting firmware", "== 0: log.insert(tk.END, \"Ok\\n\") else: log.insert(tk.END, \"Fail\\n\") set_buttons_to_state(tk.NORMAL) UPDATE_RUNNING = False def key_browse():", "log.insert(tk.END, \"Please wait\\n\") main_win.update() res = epcbootlib.urpc_write_ident(url, key, int(serial_entry.get()), version) if res ==", "version_entry.config( font=( \"Calibri Italic\", 10), foreground=\"grey\") version_entry.insert(tk.END, \"x.x.x\") return tk.TRUE # the .find(\".\",", "firmware_browse_button.config(state=state) upd_button.config(state=state) collapse_button.config(state=state) key_browse_button.config(state=state) set_key_button.config(state=state) set_ident_button.config(state=state) log_button.config(state=state) def update_firmware(): \"\"\"Updates firmware.\"\"\" global FIRMWARE", "key_browse(): \"\"\"Opens file dialog. Key must be .txt file.\"\"\" global KEY main_win.key_file =", "and key setting. Port: {}\\n Serial number: \" \"{}\\n Hardware version: {}\\n\".format(URL.get(), serial_entry.get(),", "stream.\"\"\" global RUNNING global UPDATE_LOCK global UPDATE_RUNNING if UPDATE_RUNNING: messagebox.showinfo(\"Information\", \"You need to", "version (developer only) \"\"\" import argparse import ctypes import ntpath import sys import", "mode=\"rb\", initialdir=\"/\", title=\"Select firmware\", filetypes=((\"Firmware file\", \"*.cod\"), (\"All files\", \"*.*\"))) if not isinstance(main_win.firmware,", "for EPCboot. It allows: * browse firmware on PC and load it to", "the hint version_entry.config( font=( \"Calibri Italic\", 10), foreground=\"grey\") version_entry.insert(tk.END, \"x.x.x\") return tk.TRUE #", "\"focusin\": # just enter the entry return tk.TRUE print(content) if content == \"\":", "fill=tk.X, padx=5, pady=3, ipady=4) ident_frame.pack(expand=tk.FALSE, side=tk.TOP, padx=5, fill=tk.BOTH, pady=3, ipady=4) serial_frame.pack(side=tk.TOP, fill=tk.X, pady=7)", "Version parser = argparse.ArgumentParser() parser.add_argument(\"-m\", \"--method\", choices=(\"dev\", \"cust\"), help=\"Choosing method: dev or cust", "= ttk.Button(firmware_tab, text=\"Update firmware\", state=tk.DISABLED, width=20, command=start_update) com_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=3, ipady=6) firmware_frame.pack(side=tk.TOP,", "return error_text = urlparse.validate(URL.get()) if error_text: log.insert(tk.END, error_text) return UPDATE_LOCK.release() def set_buttons_to_state(state): \"\"\"Function", "if res == 0: log.insert(tk.END, \"Ok\\n\") else: log.insert(tk.END, \"Fail\\n\") def ident_and_key_set(): \"\"\"Sets serial", "def key_browse(): \"\"\"Opens file dialog. Key must be .txt file.\"\"\" global KEY main_win.key_file", "separator.pack_forget() log_frame.pack_forget() developer_tab.pack(expand=tk.FALSE, side=tk.TOP, fill=tk.BOTH) log_frame.pack(expand=tk.TRUE, side=tk.BOTTOM, fill=tk.BOTH) DEV_STATE = True if sys.platform.startswith(\"win\"):", "-1 and trigger_type != \"focusout\"): version_entry.config(font=(\"Calibri\", 10), foreground=\"green\") return tk.TRUE log.insert(tk.END, \"MINOR should", "file offers GUI for EPCboot. 
It allows: * browse firmware on PC and", "= main_win.nametowidget(widget_name) # getting certain entry if instance is serial_entry: if _serial_validation(content, trigger_type):", "\"\": log.insert(tk.END, \"You must specify device URL.\\n\") return if FIRMWARE == \"\": log.insert(tk.END,", "firmware.\"\"\" global FIRMWARE global FIRM_PATH global RUNNING global UPDATE_LOCK global UPDATE_RUNNING global URL", "as C char* url = ctypes.create_string_buffer(URL.get().encode()) key = ctypes.create_string_buffer(KEY.get().encode()) version = ctypes.create_string_buffer(version_entry.get().encode()) log.insert(", "= ttk.Frame(left_frame) version_label = ttk.Label(version_frame, text=\"HW version:\") version_entry = ttk.Entry(version_frame, foreground=\"grey\", font=(\"Calibri Italic\",", "side=tk.LEFT, fill=tk.BOTH) right_frame.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.BOTH) serial_frame = ttk.Frame(left_frame) serial_label = ttk.Label(serial_frame, text=\"Serial number:\")", "tk.END) def browse_firmware(): \"\"\"Function opens file dialog. We are going to read binary", "empty tuple upd_button.focus() else: firmware_browse_button.focus() log.insert(tk.END, \"{} is chosen!\\n\".format(combox.get())) def _update_combox(): \"\"\"Updates COM", "key file (*.txt) and load it to controller (developer only) * update serial", "serial and hardware version\", width=30, command=ident_and_key_set) set_autoincrement_button.pack(expand=tk.TRUE, side=tk.TOP) set_ident_button.pack(expand=tk.TRUE, side=tk.BOTTOM) # Setting validation", "\"(developer or customer)\") args = parser.parse_args() # Event handlers def com_chosen(event=None): \"\"\"Sets URL.\"\"\"", "height = main_win.winfo_height() width = main_win.winfo_width() if DEV_STATE: log_frame.pack_forget() separator.pack(expand=tk.TRUE, side=tk.RIGHT, fill=tk.X, padx=5)", "text=\"Key:\") KEY = tk.StringVar() # cryptographic key key_entry = ttk.Entry(key_frame, textvariable=KEY) set_key_button =", "\"\" # # minor correction minor = content[first_dot_index + 1:] if first_dot_index ==", "pady=3, ipady=4) ident_frame.pack(expand=tk.FALSE, side=tk.TOP, padx=5, fill=tk.BOTH, pady=3, ipady=4) serial_frame.pack(side=tk.TOP, fill=tk.X, pady=7) version_frame.pack(side=tk.TOP, fill=tk.X)", "import sys import threading import tkinter as tk from tkinter import filedialog, font,", "side=tk.BOTTOM, fill=tk.BOTH) log_button.pack(side=tk.RIGHT) log.bind(\"<<Modified>>\", on_modification) # Add a thread to update firmware RUNNING" ]
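The fragments above drive the device through epcbootlib (urpc_firmware_update, urpc_write_key, urpc_write_ident), passing the port URL as a ctypes string buffer and treating a zero return as success. Below is a minimal headless sketch of the same firmware-update call, assuming only the call signature visible in the fragments; the port name and firmware path are hypothetical.

import ctypes
import sys

import epcbootlib  # the same module the GUI uses


def update_firmware_cli(port, firmware_path):
    # The GUI builds a platform-specific "com:" URL before calling the library.
    if sys.platform.startswith("win"):
        url_str = r"com:\\.\{}".format(port)        # e.g. COM3
    else:
        url_str = r"com://{}".format(port)          # e.g. /dev/ttyACM0
    url = ctypes.create_string_buffer(url_str.encode())
    with open(firmware_path, "rb") as f:            # .cod firmware image, read as raw bytes
        firmware = f.read()
    res = epcbootlib.urpc_firmware_update(url, firmware, len(firmware))
    return res == 0                                 # 0 means success in the fragments


if __name__ == "__main__":
    ok = update_firmware_cli("COM3", "firmware.cod")  # hypothetical values
    print("Ok" if ok else "Fail")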
[ "= 20 y = apply_cross_fade(tmp, cross_fade_ms, sr) time_y = np.arange(len(y)) / sr plt.plot(time_y,", "clip in clips: raw_num_samples += len(clip) total_overlap_samples = (num_clips - 1) * cross_fade_samples", "# if not is_even(cross_fade_samples): # cross_fade_samples += 1 raw_num_samples = 0 for clip", "/ 1000)) fade_ramp = np.arange(cross_fade_samples) / cross_fade_samples # if not is_even(cross_fade_samples): # cross_fade_samples", "= scipy.io.wavfile.read(file_path) x = x / np.iinfo(np.int16).max time_x = np.arange(len(x)) / sr plt.plot(time_x,", "and place. clip[:cross_fade_samples] *= fade_ramp y[write_in:write_out] += clip # Fade out. y[ramp_in:ramp_out] *=", "Quick list-of-clips demo. tmp = [] for i in range(20): tmp.append(x[i * 1000:(i", "write_in = ramp_in return y if __name__ == '__main__': import matplotlib.pyplot as plt", "len(clip) total_overlap_samples = (num_clips - 1) * cross_fade_samples num_samples = raw_num_samples - total_overlap_samples", "Update pointers. ramp_in = write_out - cross_fade_samples ramp_out = write_out # Fade in", "numpy as np def apply_cross_fade(clips, cross_fade_ms, sr): \"\"\"Concatenate audio clips with a cross", "= np.arange(cross_fade_samples) / cross_fade_samples # if not is_even(cross_fade_samples): # cross_fade_samples += 1 raw_num_samples", "1000:(i + 1) * 1000]) cross_fade_ms = 20 y = apply_cross_fade(tmp, cross_fade_ms, sr)", "= raw_num_samples - total_overlap_samples y = np.zeros(num_samples) write_in = 0 for clip in", "clips: write_out = write_in + len(clip) # Update pointers. ramp_in = write_out -", "demo. tmp = [] for i in range(20): tmp.append(x[i * 1000:(i + 1)", "= write_out - cross_fade_samples ramp_out = write_out # Fade in and place. clip[:cross_fade_samples]", "*= (1 - fade_ramp) # Advance write pointer. write_in = ramp_in return y", "cross_fade_samples ramp_out = write_out # Fade in and place. clip[:cross_fade_samples] *= fade_ramp y[write_in:write_out]", "if not is_even(cross_fade_samples): # cross_fade_samples += 1 raw_num_samples = 0 for clip in", "label='Original') # Quick list-of-clips demo. tmp = [] for i in range(20): tmp.append(x[i", "0 for clip in clips: raw_num_samples += len(clip) total_overlap_samples = (num_clips - 1)", "+= 1 raw_num_samples = 0 for clip in clips: raw_num_samples += len(clip) total_overlap_samples", "as plt import scipy.io.wavfile file_path = \"../audio/008-you-possess-the-treasure-you-seek-seed001.wav\" # Test audio file. sr, x", "1000]) cross_fade_ms = 20 y = apply_cross_fade(tmp, cross_fade_ms, sr) time_y = np.arange(len(y)) /", "= [] for i in range(20): tmp.append(x[i * 1000:(i + 1) * 1000])", "if __name__ == '__main__': import matplotlib.pyplot as plt import scipy.io.wavfile file_path = \"../audio/008-you-possess-the-treasure-you-seek-seed001.wav\"", "np.iinfo(np.int16).max time_x = np.arange(len(x)) / sr plt.plot(time_x, x, label='Original') # Quick list-of-clips demo.", "not is_even(cross_fade_samples): # cross_fade_samples += 1 raw_num_samples = 0 for clip in clips:", "in clips: write_out = write_in + len(clip) # Update pointers. ramp_in = write_out", "import scipy.io.wavfile file_path = \"../audio/008-you-possess-the-treasure-you-seek-seed001.wav\" # Test audio file. sr, x = scipy.io.wavfile.read(file_path)", "# Advance write pointer. write_in = ramp_in return y if __name__ == '__main__':", "in clips: raw_num_samples += len(clip) total_overlap_samples = (num_clips - 1) * cross_fade_samples num_samples", "write_out = write_in + len(clip) # Update pointers. 
ramp_in = write_out - cross_fade_samples", "ramp_in = write_out - cross_fade_samples ramp_out = write_out # Fade in and place.", "clip # Fade out. y[ramp_in:ramp_out] *= (1 - fade_ramp) # Advance write pointer.", "sr): \"\"\"Concatenate audio clips with a cross fade.\"\"\" num_clips = len(clips) cross_fade_samples =", "= 0 for clip in clips: raw_num_samples += len(clip) total_overlap_samples = (num_clips -", "cross_fade_samples = int(np.floor(cross_fade_ms * sr / 1000)) fade_ramp = np.arange(cross_fade_samples) / cross_fade_samples #", "= int(np.floor(cross_fade_ms * sr / 1000)) fade_ramp = np.arange(cross_fade_samples) / cross_fade_samples # if", "x = x / np.iinfo(np.int16).max time_x = np.arange(len(x)) / sr plt.plot(time_x, x, label='Original')", "scipy.io.wavfile file_path = \"../audio/008-you-possess-the-treasure-you-seek-seed001.wav\" # Test audio file. sr, x = scipy.io.wavfile.read(file_path) x", "# Test audio file. sr, x = scipy.io.wavfile.read(file_path) x = x / np.iinfo(np.int16).max", "raw_num_samples - total_overlap_samples y = np.zeros(num_samples) write_in = 0 for clip in clips:", "fade.\"\"\" num_clips = len(clips) cross_fade_samples = int(np.floor(cross_fade_ms * sr / 1000)) fade_ramp =", "out. y[ramp_in:ramp_out] *= (1 - fade_ramp) # Advance write pointer. write_in = ramp_in", "total_overlap_samples = (num_clips - 1) * cross_fade_samples num_samples = raw_num_samples - total_overlap_samples y", "apply_cross_fade(clips, cross_fade_ms, sr): \"\"\"Concatenate audio clips with a cross fade.\"\"\" num_clips = len(clips)", "apply_cross_fade(tmp, cross_fade_ms, sr) time_y = np.arange(len(y)) / sr plt.plot(time_y, y, label='Cross fade') plt.show()", "list-of-clips demo. tmp = [] for i in range(20): tmp.append(x[i * 1000:(i +", "for clip in clips: write_out = write_in + len(clip) # Update pointers. ramp_in", "Fade in and place. clip[:cross_fade_samples] *= fade_ramp y[write_in:write_out] += clip # Fade out.", "\"\"\"Concatenate audio clips with a cross fade.\"\"\" num_clips = len(clips) cross_fade_samples = int(np.floor(cross_fade_ms", "np.arange(len(x)) / sr plt.plot(time_x, x, label='Original') # Quick list-of-clips demo. tmp = []", "# Quick list-of-clips demo. tmp = [] for i in range(20): tmp.append(x[i *", "scipy.io.wavfile.read(file_path) x = x / np.iinfo(np.int16).max time_x = np.arange(len(x)) / sr plt.plot(time_x, x,", "+ 1) * 1000]) cross_fade_ms = 20 y = apply_cross_fade(tmp, cross_fade_ms, sr) time_y", "total_overlap_samples y = np.zeros(num_samples) write_in = 0 for clip in clips: write_out =", "len(clips) cross_fade_samples = int(np.floor(cross_fade_ms * sr / 1000)) fade_ramp = np.arange(cross_fade_samples) / cross_fade_samples", "tmp = [] for i in range(20): tmp.append(x[i * 1000:(i + 1) *", "for i in range(20): tmp.append(x[i * 1000:(i + 1) * 1000]) cross_fade_ms =", "write_in = 0 for clip in clips: write_out = write_in + len(clip) #", "cross_fade_samples += 1 raw_num_samples = 0 for clip in clips: raw_num_samples += len(clip)", "sr plt.plot(time_x, x, label='Original') # Quick list-of-clips demo. tmp = [] for i", "= len(clips) cross_fade_samples = int(np.floor(cross_fade_ms * sr / 1000)) fade_ramp = np.arange(cross_fade_samples) /", "is_even(cross_fade_samples): # cross_fade_samples += 1 raw_num_samples = 0 for clip in clips: raw_num_samples", "clip in clips: write_out = write_in + len(clip) # Update pointers. 
ramp_in =", "1) * 1000]) cross_fade_ms = 20 y = apply_cross_fade(tmp, cross_fade_ms, sr) time_y =", "np def apply_cross_fade(clips, cross_fade_ms, sr): \"\"\"Concatenate audio clips with a cross fade.\"\"\" num_clips", "= ramp_in return y if __name__ == '__main__': import matplotlib.pyplot as plt import", "write_in + len(clip) # Update pointers. ramp_in = write_out - cross_fade_samples ramp_out =", "/ np.iinfo(np.int16).max time_x = np.arange(len(x)) / sr plt.plot(time_x, x, label='Original') # Quick list-of-clips", "time_x = np.arange(len(x)) / sr plt.plot(time_x, x, label='Original') # Quick list-of-clips demo. tmp", "return y if __name__ == '__main__': import matplotlib.pyplot as plt import scipy.io.wavfile file_path", "a cross fade.\"\"\" num_clips = len(clips) cross_fade_samples = int(np.floor(cross_fade_ms * sr / 1000))", "= write_out # Fade in and place. clip[:cross_fade_samples] *= fade_ramp y[write_in:write_out] += clip", "= np.zeros(num_samples) write_in = 0 for clip in clips: write_out = write_in +", "num_clips = len(clips) cross_fade_samples = int(np.floor(cross_fade_ms * sr / 1000)) fade_ramp = np.arange(cross_fade_samples)", "y[write_in:write_out] += clip # Fade out. y[ramp_in:ramp_out] *= (1 - fade_ramp) # Advance", "1) * cross_fade_samples num_samples = raw_num_samples - total_overlap_samples y = np.zeros(num_samples) write_in =", "+= clip # Fade out. y[ramp_in:ramp_out] *= (1 - fade_ramp) # Advance write", "# cross_fade_samples += 1 raw_num_samples = 0 for clip in clips: raw_num_samples +=", "- cross_fade_samples ramp_out = write_out # Fade in and place. clip[:cross_fade_samples] *= fade_ramp", "fade_ramp) # Advance write pointer. write_in = ramp_in return y if __name__ ==", "y if __name__ == '__main__': import matplotlib.pyplot as plt import scipy.io.wavfile file_path =", "/ cross_fade_samples # if not is_even(cross_fade_samples): # cross_fade_samples += 1 raw_num_samples = 0", "place. clip[:cross_fade_samples] *= fade_ramp y[write_in:write_out] += clip # Fade out. y[ramp_in:ramp_out] *= (1", "tmp.append(x[i * 1000:(i + 1) * 1000]) cross_fade_ms = 20 y = apply_cross_fade(tmp,", "plt import scipy.io.wavfile file_path = \"../audio/008-you-possess-the-treasure-you-seek-seed001.wav\" # Test audio file. sr, x =", "x = scipy.io.wavfile.read(file_path) x = x / np.iinfo(np.int16).max time_x = np.arange(len(x)) / sr", "with a cross fade.\"\"\" num_clips = len(clips) cross_fade_samples = int(np.floor(cross_fade_ms * sr /", "(1 - fade_ramp) # Advance write pointer. write_in = ramp_in return y if", "x, label='Original') # Quick list-of-clips demo. tmp = [] for i in range(20):", "i in range(20): tmp.append(x[i * 1000:(i + 1) * 1000]) cross_fade_ms = 20", "sr, x = scipy.io.wavfile.read(file_path) x = x / np.iinfo(np.int16).max time_x = np.arange(len(x)) /", "- fade_ramp) # Advance write pointer. write_in = ramp_in return y if __name__", "20 y = apply_cross_fade(tmp, cross_fade_ms, sr) time_y = np.arange(len(y)) / sr plt.plot(time_y, y,", "num_samples = raw_num_samples - total_overlap_samples y = np.zeros(num_samples) write_in = 0 for clip", "# Fade in and place. clip[:cross_fade_samples] *= fade_ramp y[write_in:write_out] += clip # Fade", "== '__main__': import matplotlib.pyplot as plt import scipy.io.wavfile file_path = \"../audio/008-you-possess-the-treasure-you-seek-seed001.wav\" # Test", "= x / np.iinfo(np.int16).max time_x = np.arange(len(x)) / sr plt.plot(time_x, x, label='Original') #", "pointer. 
write_in = ramp_in return y if __name__ == '__main__': import matplotlib.pyplot as", "1 raw_num_samples = 0 for clip in clips: raw_num_samples += len(clip) total_overlap_samples =", "Fade out. y[ramp_in:ramp_out] *= (1 - fade_ramp) # Advance write pointer. write_in =", "plt.plot(time_x, x, label='Original') # Quick list-of-clips demo. tmp = [] for i in", "+= len(clip) total_overlap_samples = (num_clips - 1) * cross_fade_samples num_samples = raw_num_samples -", "/ sr plt.plot(time_x, x, label='Original') # Quick list-of-clips demo. tmp = [] for", "write_out - cross_fade_samples ramp_out = write_out # Fade in and place. clip[:cross_fade_samples] *=", "\"../audio/008-you-possess-the-treasure-you-seek-seed001.wav\" # Test audio file. sr, x = scipy.io.wavfile.read(file_path) x = x /", "cross fade.\"\"\" num_clips = len(clips) cross_fade_samples = int(np.floor(cross_fade_ms * sr / 1000)) fade_ramp", "* 1000:(i + 1) * 1000]) cross_fade_ms = 20 y = apply_cross_fade(tmp, cross_fade_ms,", "np.arange(cross_fade_samples) / cross_fade_samples # if not is_even(cross_fade_samples): # cross_fade_samples += 1 raw_num_samples =", "write_out # Fade in and place. clip[:cross_fade_samples] *= fade_ramp y[write_in:write_out] += clip #", "cross_fade_samples num_samples = raw_num_samples - total_overlap_samples y = np.zeros(num_samples) write_in = 0 for", "in range(20): tmp.append(x[i * 1000:(i + 1) * 1000]) cross_fade_ms = 20 y", "raw_num_samples += len(clip) total_overlap_samples = (num_clips - 1) * cross_fade_samples num_samples = raw_num_samples", "Advance write pointer. write_in = ramp_in return y if __name__ == '__main__': import", "Test audio file. sr, x = scipy.io.wavfile.read(file_path) x = x / np.iinfo(np.int16).max time_x", "*= fade_ramp y[write_in:write_out] += clip # Fade out. y[ramp_in:ramp_out] *= (1 - fade_ramp)", "ramp_out = write_out # Fade in and place. clip[:cross_fade_samples] *= fade_ramp y[write_in:write_out] +=", "cross_fade_ms = 20 y = apply_cross_fade(tmp, cross_fade_ms, sr) time_y = np.arange(len(y)) / sr", "- 1) * cross_fade_samples num_samples = raw_num_samples - total_overlap_samples y = np.zeros(num_samples) write_in", "def apply_cross_fade(clips, cross_fade_ms, sr): \"\"\"Concatenate audio clips with a cross fade.\"\"\" num_clips =", "ramp_in return y if __name__ == '__main__': import matplotlib.pyplot as plt import scipy.io.wavfile", "# Fade out. y[ramp_in:ramp_out] *= (1 - fade_ramp) # Advance write pointer. write_in", "__name__ == '__main__': import matplotlib.pyplot as plt import scipy.io.wavfile file_path = \"../audio/008-you-possess-the-treasure-you-seek-seed001.wav\" #", "fade_ramp = np.arange(cross_fade_samples) / cross_fade_samples # if not is_even(cross_fade_samples): # cross_fade_samples += 1", "cross_fade_samples # if not is_even(cross_fade_samples): # cross_fade_samples += 1 raw_num_samples = 0 for", "= write_in + len(clip) # Update pointers. ramp_in = write_out - cross_fade_samples ramp_out", "audio clips with a cross fade.\"\"\" num_clips = len(clips) cross_fade_samples = int(np.floor(cross_fade_ms *", "write pointer. 
write_in = ramp_in return y if __name__ == '__main__': import matplotlib.pyplot", "- total_overlap_samples y = np.zeros(num_samples) write_in = 0 for clip in clips: write_out", "raw_num_samples = 0 for clip in clips: raw_num_samples += len(clip) total_overlap_samples = (num_clips", "[] for i in range(20): tmp.append(x[i * 1000:(i + 1) * 1000]) cross_fade_ms", "as np def apply_cross_fade(clips, cross_fade_ms, sr): \"\"\"Concatenate audio clips with a cross fade.\"\"\"", "pointers. ramp_in = write_out - cross_fade_samples ramp_out = write_out # Fade in and", "audio file. sr, x = scipy.io.wavfile.read(file_path) x = x / np.iinfo(np.int16).max time_x =", "* sr / 1000)) fade_ramp = np.arange(cross_fade_samples) / cross_fade_samples # if not is_even(cross_fade_samples):", "fade_ramp y[write_in:write_out] += clip # Fade out. y[ramp_in:ramp_out] *= (1 - fade_ramp) #", "file. sr, x = scipy.io.wavfile.read(file_path) x = x / np.iinfo(np.int16).max time_x = np.arange(len(x))", "y[ramp_in:ramp_out] *= (1 - fade_ramp) # Advance write pointer. write_in = ramp_in return", "in and place. clip[:cross_fade_samples] *= fade_ramp y[write_in:write_out] += clip # Fade out. y[ramp_in:ramp_out]", "'__main__': import matplotlib.pyplot as plt import scipy.io.wavfile file_path = \"../audio/008-you-possess-the-treasure-you-seek-seed001.wav\" # Test audio", "cross_fade_ms, sr): \"\"\"Concatenate audio clips with a cross fade.\"\"\" num_clips = len(clips) cross_fade_samples", "clips with a cross fade.\"\"\" num_clips = len(clips) cross_fade_samples = int(np.floor(cross_fade_ms * sr", "y = np.zeros(num_samples) write_in = 0 for clip in clips: write_out = write_in", "0 for clip in clips: write_out = write_in + len(clip) # Update pointers.", "+ len(clip) # Update pointers. ramp_in = write_out - cross_fade_samples ramp_out = write_out", "* 1000]) cross_fade_ms = 20 y = apply_cross_fade(tmp, cross_fade_ms, sr) time_y = np.arange(len(y))", "sr / 1000)) fade_ramp = np.arange(cross_fade_samples) / cross_fade_samples # if not is_even(cross_fade_samples): #", "= np.arange(len(x)) / sr plt.plot(time_x, x, label='Original') # Quick list-of-clips demo. tmp =", "matplotlib.pyplot as plt import scipy.io.wavfile file_path = \"../audio/008-you-possess-the-treasure-you-seek-seed001.wav\" # Test audio file. sr,", "import numpy as np def apply_cross_fade(clips, cross_fade_ms, sr): \"\"\"Concatenate audio clips with a", "x / np.iinfo(np.int16).max time_x = np.arange(len(x)) / sr plt.plot(time_x, x, label='Original') # Quick", "int(np.floor(cross_fade_ms * sr / 1000)) fade_ramp = np.arange(cross_fade_samples) / cross_fade_samples # if not", "range(20): tmp.append(x[i * 1000:(i + 1) * 1000]) cross_fade_ms = 20 y =", "y = apply_cross_fade(tmp, cross_fade_ms, sr) time_y = np.arange(len(y)) / sr plt.plot(time_y, y, label='Cross", "= \"../audio/008-you-possess-the-treasure-you-seek-seed001.wav\" # Test audio file. sr, x = scipy.io.wavfile.read(file_path) x = x", "# Update pointers. ramp_in = write_out - cross_fade_samples ramp_out = write_out # Fade", "= apply_cross_fade(tmp, cross_fade_ms, sr) time_y = np.arange(len(y)) / sr plt.plot(time_y, y, label='Cross fade')", "clip[:cross_fade_samples] *= fade_ramp y[write_in:write_out] += clip # Fade out. 
y[ramp_in:ramp_out] *= (1 -", "1000)) fade_ramp = np.arange(cross_fade_samples) / cross_fade_samples # if not is_even(cross_fade_samples): # cross_fade_samples +=", "(num_clips - 1) * cross_fade_samples num_samples = raw_num_samples - total_overlap_samples y = np.zeros(num_samples)", "len(clip) # Update pointers. ramp_in = write_out - cross_fade_samples ramp_out = write_out #", "for clip in clips: raw_num_samples += len(clip) total_overlap_samples = (num_clips - 1) *", "import matplotlib.pyplot as plt import scipy.io.wavfile file_path = \"../audio/008-you-possess-the-treasure-you-seek-seed001.wav\" # Test audio file.", "= (num_clips - 1) * cross_fade_samples num_samples = raw_num_samples - total_overlap_samples y =", "clips: raw_num_samples += len(clip) total_overlap_samples = (num_clips - 1) * cross_fade_samples num_samples =", "np.zeros(num_samples) write_in = 0 for clip in clips: write_out = write_in + len(clip)", "= 0 for clip in clips: write_out = write_in + len(clip) # Update", "* cross_fade_samples num_samples = raw_num_samples - total_overlap_samples y = np.zeros(num_samples) write_in = 0", "file_path = \"../audio/008-you-possess-the-treasure-you-seek-seed001.wav\" # Test audio file. sr, x = scipy.io.wavfile.read(file_path) x =" ]
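A quick self-contained check of apply_cross_fade, sketched below as an alternative to the wav-file demo above. The two sine clips, their frequencies, the sample rate and the 20 ms fade are arbitrary choices for illustration; the assert only confirms that the output length matches the overlap arithmetic.

import numpy as np

sr = 16000
t = np.arange(sr) / sr                                   # two 1-second clips
clips = [np.sin(2 * np.pi * 440 * t), np.sin(2 * np.pi * 660 * t)]
cross_fade_ms = 20

# apply_cross_fade fades each clip's head in place, so pass copies.
y = apply_cross_fade([c.copy() for c in clips], cross_fade_ms, sr)

cf = int(np.floor(cross_fade_ms * sr / 1000))
expected = sum(len(c) for c in clips) - (len(clips) - 1) * cf
assert len(y) == expected                                # clips overlap by one fade length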
[ "pool.close() pool.join() conv6[conv6<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv6) pool.close() pool.join() m6=[i[0] for i in m1_] pos6=[i[1]for", "multiprocessing import Pool from itertools import product from numba import njit from functools", "np.max(Z, axis = 0, keepdims = True,initial=-np.inf)) return e_Z / e_Z.sum(axis = 0)", "data def square(x): return x**2 def parallel_2(data,reshape_dim): x=0 pool=Pool(4) x=pool.map(prepare_2,data) print(x) pool.close() pool.join()", "y1,y2,y3 def CNN(data,output,lr,epoch): k1=np.random.rand(3,3) k2=np.random.rand(3,3) k3=np.random.rand(3,3) k4=np.random.rand(3,3) k5=np.random.rand(3,3) k6=np.random.rand(3,3) k7=np.random.rand(3,3) k8=np.random.rand(3,3) pool=Pool(4) conv1=pool.map(partial(conv_layer,kernel=k1),data)", "in m1_] pos4=[i[1]for i in m1_] u4=[i[2]for i in m1_] r4=[i[3]for i in", "m1_] pos7=[i[1]for i in m1_] u7=[i[2]for i in m1_] r7=[i[3]for i in m1_]", "pool.join() return x def softmax(Z): e_Z = np.exp(Z - np.max(Z, axis = 0,", "print(features.shape) print(weights.shape) print(predicted.shape) #print(np.linalg.norm(predicted-output)) weights=weights-learning_rate*(((output-predicted)@features.T).T) return weights def Adam(features,output,weights,lr,t,beta1=0.9,beta2=0.999,epsilon=1e-08): #print(features.shape) #print(output.shape) #print(weights) #print(type(weights))", "k6=np.random.rand(3,3) k7=np.random.rand(3,3) k8=np.random.rand(3,3) pool=Pool(4) conv1=pool.map(partial(conv_layer,kernel=k1),data) pool.close() pool.join() conv1[conv1<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv1) pool.close() pool.join() m1=[i[0]", "print(weights.shape) print(predicted.shape) #print(np.linalg.norm(predicted-output)) weights=weights-learning_rate*(((output-predicted)@features.T).T) return weights def Adam(features,output,weights,lr,t,beta1=0.9,beta2=0.999,epsilon=1e-08): #print(features.shape) #print(output.shape) #print(weights) #print(type(weights)) predicted=predict(features,weights)", "u1=[i[2]for i in m1_] r1=[i[3]for i in m1_] pool=Pool(4) conv2=pool.map(partial(conv_layer,kernel=k2),m1) pool.close() pool.join() conv2[conv2<=0]=0", "in m1_] r8=[i[3]for i in m1_] def train(folder,reshape_dim,learning_rate,epoch): data,output=load_data(folder) #data=[1,2,3,4,5,6,7,8,9,10,11,12,13] #print(output) #print(output[0].shape) #print(data[0].shape)", "n=len(os.listdir(folder)) #print(n) output=[] iters = 0 for filename in os.listdir(folder): path=folder+\"\\\\\"+filename pictures =", "#print(n) output=[] iters = 0 for filename in os.listdir(folder): path=folder+\"\\\\\"+filename pictures = load_images_from_folder(path)", "return x**2 def parallel_2(data,reshape_dim): x=0 pool=Pool(4) x=pool.map(prepare_2,data) print(x) pool.close() pool.join() return x def", "i in range(0,len(data)): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1) return data def prepare(data,reshape_dim,i): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1) def prepare_2(data):", "multiprocessing from multiprocessing import Pool from itertools import product from numba import njit", "path=folder+\"\\\\\"+filename pictures = load_images_from_folder(path) for pics in pictures: images.append(pics) y=np.zeros((n,1)) y[iters,:] =1 y.reshape(1,n)", "weights def softmax_regression_2(data,output,x1,x2,x3): output=np.asarray(output) output=output.reshape(output.shape[0],output.shape[1]).T output=output.reshape(-1) data=np.asarray(data) 
data=data.reshape(data.shape[0],data.shape[1]).T weights=np.zeros((len(data),len(output))) model=sklearn.linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') model.fit(data,output) y1=model.predict(x1) y2=model.predict(x2)", "m1_] u2=[i[2]for i in m1_] r2=[i[3]for i in m1_] pool=Pool(4) conv3=pool.map(partial(conv_layer,kernel=k3),m2) pool.close() pool.join()", "return data def parallel(data,reshape_dim): process=[] for i in range(len(data)): p=multiprocessing.Process(target=prepare,args=(data,reshape_dim,i)) process.append(p) for x", "\"\"\"for i in range(epoch): predicted=predict(data_hat,weights) print(np.linalg.norm(predicted-output_hat)) #for n in np.random.permutation(len(output)): weights=Adam(data_hat,output_hat,weights,learning_rate,i) #if np.linalg.norm(weights-pre_weights)<0.0001:", "pos6=[i[1]for i in m1_] u6=[i[2]for i in m1_] r6=[i[3]for i in m1_] pool=Pool(4)", "pool=Pool(4) m1_=pool.map(max_pooling_,conv1) pool.close() pool.join() m1=[i[0] for i in m1_] pos1=[i[1]for i in m1_]", "m1_] r4=[i[3]for i in m1_] pool=Pool(4) conv5=pool.map(partial(conv_layer,kernel=k5),m4) pool.close() pool.join() conv5[conv5<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv5) pool.close()", "pool.close() pool.join() m1=[i[0] for i in m1_] pos1=[i[1]for i in m1_] u1=[i[2]for i", "from numba import njit from functools import partial import math import sklearn from", "conv1=pool.map(partial(conv_layer,kernel=k1),data) pool.close() pool.join() conv1[conv1<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv1) pool.close() pool.join() m1=[i[0] for i in m1_]", "images=[] n=len(os.listdir(folder)) #print(n) output=[] iters = 0 for filename in os.listdir(folder): path=folder+\"\\\\\"+filename pictures", "# print(i) # break\"\"\" return weights def softmax_regression_2(data,output,x1,x2,x3): output=np.asarray(output) output=output.reshape(output.shape[0],output.shape[1]).T output=output.reshape(-1) data=np.asarray(data) data=data.reshape(data.shape[0],data.shape[1]).T", "1 return images,output def convert(l): return (*l,) def data_preprocessing(data,reshape_dim): for i in range(0,len(data)):", "y[iters,:] =1 y.reshape(1,n) output.append(y) iters += 1 return images,output def convert(l): return (*l,)", "m1_] pos2=[i[1]for i in m1_] u2=[i[2]for i in m1_] r2=[i[3]for i in m1_]", "pool.close() pool.join() m2=[i[0] for i in m1_] pos2=[i[1]for i in m1_] u2=[i[2]for i", "conv6[conv6<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv6) pool.close() pool.join() m6=[i[0] for i in m1_] pos6=[i[1]for i in", "y]) def update_weights(features,output,weights,learning_rate): predicted=predict(features,weights) print(features.shape) print(weights.shape) print(predicted.shape) #print(np.linalg.norm(predicted-output)) weights=weights-learning_rate*(((output-predicted)@features.T).T) return weights def Adam(features,output,weights,lr,t,beta1=0.9,beta2=0.999,epsilon=1e-08):", "range(epoch): # weights=update_weights(data,output,weights,learning_rate) return y1,y2,y3 def CNN(data,output,lr,epoch): k1=np.random.rand(3,3) k2=np.random.rand(3,3) k3=np.random.rand(3,3) k4=np.random.rand(3,3) k5=np.random.rand(3,3) k6=np.random.rand(3,3)", "sklearn from sklearn import linear_model def load_images_from_folder(folder): images = [] for filename in", "+= 1 return images,output def convert(l): return (*l,) def data_preprocessing(data,reshape_dim): for i in", "pool.join() conv4[conv4<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv4) pool.close() pool.join() m4=[i[0] for i in m1_] pos4=[i[1]for i", 
"data[i]=data[i].reshape(data[i].size,1) return data def prepare(data,reshape_dim,i): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1) def prepare_2(data): data=ConvNet(cv2.resize(data/255,(256,256),interpolation=cv2.INTER_AREA)) data=data.reshape(data.size,1) return data", "pool=Pool(4) x=pool.map(prepare_2,data) print(x) pool.close() pool.join() return x def softmax(Z): e_Z = np.exp(Z -", "u2=[i[2]for i in m1_] r2=[i[3]for i in m1_] pool=Pool(4) conv3=pool.map(partial(conv_layer,kernel=k3),m2) pool.close() pool.join() conv3[conv3<=0]=0", "in m1_] pool=Pool(4) conv3=pool.map(partial(conv_layer,kernel=k3),m2) pool.close() pool.join() conv3[conv3<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv3) pool.close() pool.join() m3=[i[0] for", "pool.close() pool.join() m6=[i[0] for i in m1_] pos6=[i[1]for i in m1_] u6=[i[2]for i", "return weights def softmax_regression(data,output,learning_rate,epoch): data_hat=np.array(data) data_hat=data_hat.reshape(data_hat.shape[0],data_hat.shape[1]).T output_hat=np.array(output) output_hat=output_hat.reshape(output_hat.shape[0],output_hat.shape[1]).T pre_weights=0 weights=np.zeros((len(data[0]),len(output[0]))) model=linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') \"\"\"for i", "0) def predict(X,weights): return softmax(weights.T@X) def cross_entropy(y_hat, y): return - np.log(y_hat[range(len(y_hat)), y]) def", "pos1=[i[1]for i in m1_] u1=[i[2]for i in m1_] r1=[i[3]for i in m1_] pool=Pool(4)", "#print(data[0].shape) #print(data[1]) data=parallel_2(data,reshape_dim) weights=softmax_regression(data,output,learning_rate,epoch) return weights def train_with_sklearn(folder,reshape_dim,x1,x2,x3): data,output=load_data(folder) data=parallel_2(data,reshape_dim) y1,y2,y3=softmax_regression_2(data,output,x1,x2,x3) return y1,y2,y3", "in range(len(data)): p=multiprocessing.Process(target=prepare,args=(data,reshape_dim,i)) process.append(p) for x in process: x.start() for x in process:", "r2=[i[3]for i in m1_] pool=Pool(4) conv3=pool.map(partial(conv_layer,kernel=k3),m2) pool.close() pool.join() conv3[conv3<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv3) pool.close() pool.join()", "cross_entropy(y_hat, y): return - np.log(y_hat[range(len(y_hat)), y]) def update_weights(features,output,weights,learning_rate): predicted=predict(features,weights) print(features.shape) print(weights.shape) print(predicted.shape) #print(np.linalg.norm(predicted-output))", "pool=Pool(4) m1_=pool.map(max_pooling_,conv5) pool.close() pool.join() m5=[i[0] for i in m1_] pos5=[i[1]for i in m1_]", "load_images_from_folder(path) for pics in pictures: images.append(pics) y=np.zeros((n,1)) y[iters,:] =1 y.reshape(1,n) output.append(y) iters +=", "in m1_] pos8=[i[1]for i in m1_] u8=[i[2]for i in m1_] r8=[i[3]for i in", "data=np.asarray(data) data=data.reshape(data.shape[0],data.shape[1]).T weights=np.zeros((len(data),len(output))) model=sklearn.linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') model.fit(data,output) y1=model.predict(x1) y2=model.predict(x2) y3=model.predict(x3) #for i in range(epoch): #", "i in m1_] u4=[i[2]for i in m1_] r4=[i[3]for i in m1_] pool=Pool(4) conv5=pool.map(partial(conv_layer,kernel=k5),m4)", "i in m1_] pool=Pool(4) conv8=pool.map(partial(conv_layer,kernel=k8),m7) pool.close() pool.join() conv8[conv8<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv1) pool.close() pool.join() m8=[i[0]", "def 
Adam(features,output,weights,lr,t,beta1=0.9,beta2=0.999,epsilon=1e-08): #print(features.shape) #print(output.shape) #print(weights) #print(type(weights)) predicted=predict(features,weights) g=(-(output-predicted)@features.T).T m=np.zeros(weights.shape) v=np.zeros(weights.shape) m=beta1*m+(1-beta1)*g v=beta2*v+(1-beta2)*(g*g) m_hat=m/(1-(beta1**(t+1)))", "weights=np.zeros((len(data[0]),len(output[0]))) model=linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') \"\"\"for i in range(epoch): predicted=predict(data_hat,weights) print(np.linalg.norm(predicted-output_hat)) #for n in np.random.permutation(len(output)): weights=Adam(data_hat,output_hat,weights,learning_rate,i)", "pool.join() m5=[i[0] for i in m1_] pos5=[i[1]for i in m1_] u5=[i[2]for i in", "import partial import math import sklearn from sklearn import linear_model def load_images_from_folder(folder): images", "r8=[i[3]for i in m1_] def train(folder,reshape_dim,learning_rate,epoch): data,output=load_data(folder) #data=[1,2,3,4,5,6,7,8,9,10,11,12,13] #print(output) #print(output[0].shape) #print(data[0].shape) #print(data[1]) data=parallel_2(data,reshape_dim)", "predicted=predict(features,weights) print(features.shape) print(weights.shape) print(predicted.shape) #print(np.linalg.norm(predicted-output)) weights=weights-learning_rate*(((output-predicted)@features.T).T) return weights def Adam(features,output,weights,lr,t,beta1=0.9,beta2=0.999,epsilon=1e-08): #print(features.shape) #print(output.shape) #print(weights)", "k7=np.random.rand(3,3) k8=np.random.rand(3,3) pool=Pool(4) conv1=pool.map(partial(conv_layer,kernel=k1),data) pool.close() pool.join() conv1[conv1<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv1) pool.close() pool.join() m1=[i[0] for", "from itertools import product from numba import njit from functools import partial import", "pool=Pool(4) conv6=pool.map(partial(conv_layer,kernel=k6),m5) pool.close() pool.join() conv6[conv6<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv6) pool.close() pool.join() m6=[i[0] for i in", "in m1_] u6=[i[2]for i in m1_] r6=[i[3]for i in m1_] pool=Pool(4) conv7=pool.map(partial(conv_layer,kernel=k7),m6) pool.close()", "#print(m_hat,v_hat) #print(type(((lr*m_hat)/(np.sqrt(v_hat)+epsilon)).T)) weights=weights-((lr*m_hat)/(np.sqrt(v_hat)+epsilon)) return weights def softmax_regression(data,output,learning_rate,epoch): data_hat=np.array(data) data_hat=data_hat.reshape(data_hat.shape[0],data_hat.shape[1]).T output_hat=np.array(output) output_hat=output_hat.reshape(output_hat.shape[0],output_hat.shape[1]).T pre_weights=0 weights=np.zeros((len(data[0]),len(output[0])))", "in m1_] u4=[i[2]for i in m1_] r4=[i[3]for i in m1_] pool=Pool(4) conv5=pool.map(partial(conv_layer,kernel=k5),m4) pool.close()", "softmax_regression_2(data,output,x1,x2,x3): output=np.asarray(output) output=output.reshape(output.shape[0],output.shape[1]).T output=output.reshape(-1) data=np.asarray(data) data=data.reshape(data.shape[0],data.shape[1]).T weights=np.zeros((len(data),len(output))) model=sklearn.linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') model.fit(data,output) y1=model.predict(x1) y2=model.predict(x2) y3=model.predict(x3) #for", "i in m1_] u1=[i[2]for i in m1_] r1=[i[3]for i in m1_] pool=Pool(4) conv2=pool.map(partial(conv_layer,kernel=k2),m1)", "def softmax_regression_2(data,output,x1,x2,x3): output=np.asarray(output) output=output.reshape(output.shape[0],output.shape[1]).T output=output.reshape(-1) data=np.asarray(data) 
data=data.reshape(data.shape[0],data.shape[1]).T weights=np.zeros((len(data),len(output))) model=sklearn.linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') model.fit(data,output) y1=model.predict(x1) y2=model.predict(x2) y3=model.predict(x3)", "y1=model.predict(x1) y2=model.predict(x2) y3=model.predict(x3) #for i in range(epoch): # weights=update_weights(data,output,weights,learning_rate) return y1,y2,y3 def CNN(data,output,lr,epoch):", "m1_] u3=[i[2]for i in m1_] r3=[i[3]for i in m1_] pool=Pool(4) conv4=pool.map(partial(conv_layer,kernel=k4),m3) pool.close() pool.join()", "i in m1_] def train(folder,reshape_dim,learning_rate,epoch): data,output=load_data(folder) #data=[1,2,3,4,5,6,7,8,9,10,11,12,13] #print(output) #print(output[0].shape) #print(data[0].shape) #print(data[1]) data=parallel_2(data,reshape_dim) weights=softmax_regression(data,output,learning_rate,epoch)", "pool.close() pool.join() m3=[i[0] for i in m1_] pos3=[i[1]for i in m1_] u3=[i[2]for i", "u3=[i[2]for i in m1_] r3=[i[3]for i in m1_] pool=Pool(4) conv4=pool.map(partial(conv_layer,kernel=k4),m3) pool.close() pool.join() conv4[conv4<=0]=0", "in m1_] pool=Pool(4) conv6=pool.map(partial(conv_layer,kernel=k6),m5) pool.close() pool.join() conv6[conv6<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv6) pool.close() pool.join() m6=[i[0] for", "for i in m1_] pos7=[i[1]for i in m1_] u7=[i[2]for i in m1_] r7=[i[3]for", "=1 y.reshape(1,n) output.append(y) iters += 1 return images,output def convert(l): return (*l,) def", "return images def load_data(folder): images=[] n=len(os.listdir(folder)) #print(n) output=[] iters = 0 for filename", "return data def prepare(data,reshape_dim,i): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1) def prepare_2(data): data=ConvNet(cv2.resize(data/255,(256,256),interpolation=cv2.INTER_AREA)) data=data.reshape(data.size,1) return data def", "#print(output) #print(output[0].shape) #print(data[0].shape) #print(data[1]) data=parallel_2(data,reshape_dim) weights=softmax_regression(data,output,learning_rate,epoch) return weights def train_with_sklearn(folder,reshape_dim,x1,x2,x3): data,output=load_data(folder) data=parallel_2(data,reshape_dim) y1,y2,y3=softmax_regression_2(data,output,x1,x2,x3)", "data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1) def prepare_2(data): data=ConvNet(cv2.resize(data/255,(256,256),interpolation=cv2.INTER_AREA)) data=data.reshape(data.size,1) return data def parallel(data,reshape_dim): process=[] for i", "i in m1_] pos8=[i[1]for i in m1_] u8=[i[2]for i in m1_] r8=[i[3]for i", "in m1_] r1=[i[3]for i in m1_] pool=Pool(4) conv2=pool.map(partial(conv_layer,kernel=k2),m1) pool.close() pool.join() conv2[conv2<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv2)", "return y1,y2,y3 def CNN(data,output,lr,epoch): k1=np.random.rand(3,3) k2=np.random.rand(3,3) k3=np.random.rand(3,3) k4=np.random.rand(3,3) k5=np.random.rand(3,3) k6=np.random.rand(3,3) k7=np.random.rand(3,3) k8=np.random.rand(3,3) pool=Pool(4)", "pos8=[i[1]for i in m1_] u8=[i[2]for i in m1_] r8=[i[3]for i in m1_] def", "i in m1_] r1=[i[3]for i in m1_] pool=Pool(4) conv2=pool.map(partial(conv_layer,kernel=k2),m1) pool.close() pool.join() conv2[conv2<=0]=0 pool=Pool(4)", "pool.join() conv7[conv7<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv7) pool.close() pool.join() m7=[i[0] for i in m1_] pos7=[i[1]for i", "# 
weights=update_weights(data,output,weights,learning_rate) return y1,y2,y3 def CNN(data,output,lr,epoch): k1=np.random.rand(3,3) k2=np.random.rand(3,3) k3=np.random.rand(3,3) k4=np.random.rand(3,3) k5=np.random.rand(3,3) k6=np.random.rand(3,3) k7=np.random.rand(3,3)", "m1_] r2=[i[3]for i in m1_] pool=Pool(4) conv3=pool.map(partial(conv_layer,kernel=k3),m2) pool.close() pool.join() conv3[conv3<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv3) pool.close()", "output=output.reshape(-1) data=np.asarray(data) data=data.reshape(data.shape[0],data.shape[1]).T weights=np.zeros((len(data),len(output))) model=sklearn.linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') model.fit(data,output) y1=model.predict(x1) y2=model.predict(x2) y3=model.predict(x3) #for i in range(epoch):", "pool.join() m4=[i[0] for i in m1_] pos4=[i[1]for i in m1_] u4=[i[2]for i in", "pos7=[i[1]for i in m1_] u7=[i[2]for i in m1_] r7=[i[3]for i in m1_] pool=Pool(4)", "e_Z.sum(axis = 0) def predict(X,weights): return softmax(weights.T@X) def cross_entropy(y_hat, y): return - np.log(y_hat[range(len(y_hat)),", "- np.max(Z, axis = 0, keepdims = True,initial=-np.inf)) return e_Z / e_Z.sum(axis =", "m3=[i[0] for i in m1_] pos3=[i[1]for i in m1_] u3=[i[2]for i in m1_]", "process=[] for i in range(len(data)): p=multiprocessing.Process(target=prepare,args=(data,reshape_dim,i)) process.append(p) for x in process: x.start() for", "return x def softmax(Z): e_Z = np.exp(Z - np.max(Z, axis = 0, keepdims", "print(x) pool.close() pool.join() return x def softmax(Z): e_Z = np.exp(Z - np.max(Z, axis", "(*l,) def data_preprocessing(data,reshape_dim): for i in range(0,len(data)): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1) return data def prepare(data,reshape_dim,i):", "range(0,len(data)): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1) return data def prepare(data,reshape_dim,i): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1) def prepare_2(data): data=ConvNet(cv2.resize(data/255,(256,256),interpolation=cv2.INTER_AREA)) data=data.reshape(data.size,1)", "pool=Pool(4) conv3=pool.map(partial(conv_layer,kernel=k3),m2) pool.close() pool.join() conv3[conv3<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv3) pool.close() pool.join() m3=[i[0] for i in", "def predict(X,weights): return softmax(weights.T@X) def cross_entropy(y_hat, y): return - np.log(y_hat[range(len(y_hat)), y]) def update_weights(features,output,weights,learning_rate):", "i in m1_] pos6=[i[1]for i in m1_] u6=[i[2]for i in m1_] r6=[i[3]for i", "= load_images_from_folder(path) for pics in pictures: images.append(pics) y=np.zeros((n,1)) y[iters,:] =1 y.reshape(1,n) output.append(y) iters", "pool.join() m2=[i[0] for i in m1_] pos2=[i[1]for i in m1_] u2=[i[2]for i in", "y3=model.predict(x3) #for i in range(epoch): # weights=update_weights(data,output,weights,learning_rate) return y1,y2,y3 def CNN(data,output,lr,epoch): k1=np.random.rand(3,3) k2=np.random.rand(3,3)", "softmax(weights.T@X) def cross_entropy(y_hat, y): return - np.log(y_hat[range(len(y_hat)), y]) def update_weights(features,output,weights,learning_rate): predicted=predict(features,weights) print(features.shape) print(weights.shape)", "i in m1_] r3=[i[3]for i in m1_] pool=Pool(4) conv4=pool.map(partial(conv_layer,kernel=k4),m3) pool.close() pool.join() conv4[conv4<=0]=0 
pool=Pool(4)", "m1_] pool=Pool(4) conv5=pool.map(partial(conv_layer,kernel=k5),m4) pool.close() pool.join() conv5[conv5<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv5) pool.close() pool.join() m5=[i[0] for i", "for x in process: x.join() for i in data: print(i.shape) return data def", "i in m1_] pos7=[i[1]for i in m1_] u7=[i[2]for i in m1_] r7=[i[3]for i", "m1_] pool=Pool(4) conv2=pool.map(partial(conv_layer,kernel=k2),m1) pool.close() pool.join() conv2[conv2<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv2) pool.close() pool.join() m2=[i[0] for i", "i in m1_] pos3=[i[1]for i in m1_] u3=[i[2]for i in m1_] r3=[i[3]for i", "process: x.join() for i in data: print(i.shape) return data def square(x): return x**2", "model=linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') \"\"\"for i in range(epoch): predicted=predict(data_hat,weights) print(np.linalg.norm(predicted-output_hat)) #for n in np.random.permutation(len(output)): weights=Adam(data_hat,output_hat,weights,learning_rate,i) #if", "m1_] u6=[i[2]for i in m1_] r6=[i[3]for i in m1_] pool=Pool(4) conv7=pool.map(partial(conv_layer,kernel=k7),m6) pool.close() pool.join()", "i in m1_] pos1=[i[1]for i in m1_] u1=[i[2]for i in m1_] r1=[i[3]for i", "def convert(l): return (*l,) def data_preprocessing(data,reshape_dim): for i in range(0,len(data)): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1) return", "range(len(data)): p=multiprocessing.Process(target=prepare,args=(data,reshape_dim,i)) process.append(p) for x in process: x.start() for x in process: x.join()", "m1=[i[0] for i in m1_] pos1=[i[1]for i in m1_] u1=[i[2]for i in m1_]", "for i in m1_] pos1=[i[1]for i in m1_] u1=[i[2]for i in m1_] r1=[i[3]for", "conv1[conv1<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv1) pool.close() pool.join() m1=[i[0] for i in m1_] pos1=[i[1]for i in", "pictures: images.append(pics) y=np.zeros((n,1)) y[iters,:] =1 y.reshape(1,n) output.append(y) iters += 1 return images,output def", "u6=[i[2]for i in m1_] r6=[i[3]for i in m1_] pool=Pool(4) conv7=pool.map(partial(conv_layer,kernel=k7),m6) pool.close() pool.join() conv7[conv7<=0]=0", "pool.join() conv6[conv6<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv6) pool.close() pool.join() m6=[i[0] for i in m1_] pos6=[i[1]for i", "x.start() for x in process: x.join() for i in data: print(i.shape) return data", "i in m1_] pool=Pool(4) conv2=pool.map(partial(conv_layer,kernel=k2),m1) pool.close() pool.join() conv2[conv2<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv2) pool.close() pool.join() m2=[i[0]", "for i in m1_] pos8=[i[1]for i in m1_] u8=[i[2]for i in m1_] r8=[i[3]for", "data def prepare(data,reshape_dim,i): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1) def prepare_2(data): data=ConvNet(cv2.resize(data/255,(256,256),interpolation=cv2.INTER_AREA)) data=data.reshape(data.size,1) return data def parallel(data,reshape_dim):", "m1_=pool.map(max_pooling_,conv3) pool.close() pool.join() m3=[i[0] for i in m1_] pos3=[i[1]for i in m1_] u3=[i[2]for", "parallel_2(data,reshape_dim): x=0 pool=Pool(4) x=pool.map(prepare_2,data) print(x) pool.close() pool.join() return x def softmax(Z): e_Z =", "in m1_] u2=[i[2]for i in m1_] r2=[i[3]for i in m1_] pool=Pool(4) conv3=pool.map(partial(conv_layer,kernel=k3),m2) pool.close()", "pics in pictures: images.append(pics) y=np.zeros((n,1)) y[iters,:] =1 y.reshape(1,n) output.append(y) iters += 1 return", "in 
m1_] pool=Pool(4) conv5=pool.map(partial(conv_layer,kernel=k5),m4) pool.close() pool.join() conv5[conv5<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv5) pool.close() pool.join() m5=[i[0] for", "- np.log(y_hat[range(len(y_hat)), y]) def update_weights(features,output,weights,learning_rate): predicted=predict(features,weights) print(features.shape) print(weights.shape) print(predicted.shape) #print(np.linalg.norm(predicted-output)) weights=weights-learning_rate*(((output-predicted)@features.T).T) return weights", "m=np.zeros(weights.shape) v=np.zeros(weights.shape) m=beta1*m+(1-beta1)*g v=beta2*v+(1-beta2)*(g*g) m_hat=m/(1-(beta1**(t+1))) v_hat=v/(1-(beta2**(t+1))) #print(m_hat,v_hat) #print(type(((lr*m_hat)/(np.sqrt(v_hat)+epsilon)).T)) weights=weights-((lr*m_hat)/(np.sqrt(v_hat)+epsilon)) return weights def softmax_regression(data,output,learning_rate,epoch):", "x**2 def parallel_2(data,reshape_dim): x=0 pool=Pool(4) x=pool.map(prepare_2,data) print(x) pool.close() pool.join() return x def softmax(Z):", "weights=np.zeros((len(data),len(output))) model=sklearn.linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') model.fit(data,output) y1=model.predict(x1) y2=model.predict(x2) y3=model.predict(x3) #for i in range(epoch): # weights=update_weights(data,output,weights,learning_rate) return", "load_data(folder): images=[] n=len(os.listdir(folder)) #print(n) output=[] iters = 0 for filename in os.listdir(folder): path=folder+\"\\\\\"+filename", "parallel(data,reshape_dim): process=[] for i in range(len(data)): p=multiprocessing.Process(target=prepare,args=(data,reshape_dim,i)) process.append(p) for x in process: x.start()", "m1_] pool=Pool(4) conv8=pool.map(partial(conv_layer,kernel=k8),m7) pool.close() pool.join() conv8[conv8<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv1) pool.close() pool.join() m8=[i[0] for i", "#print(type(((lr*m_hat)/(np.sqrt(v_hat)+epsilon)).T)) weights=weights-((lr*m_hat)/(np.sqrt(v_hat)+epsilon)) return weights def softmax_regression(data,output,learning_rate,epoch): data_hat=np.array(data) data_hat=data_hat.reshape(data_hat.shape[0],data_hat.shape[1]).T output_hat=np.array(output) output_hat=output_hat.reshape(output_hat.shape[0],output_hat.shape[1]).T pre_weights=0 weights=np.zeros((len(data[0]),len(output[0]))) model=linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial')", "x in process: x.start() for x in process: x.join() for i in data:", "i in m1_] pool=Pool(4) conv3=pool.map(partial(conv_layer,kernel=k3),m2) pool.close() pool.join() conv3[conv3<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv3) pool.close() pool.join() m3=[i[0]", "conv import * import multiprocessing from multiprocessing import Pool from itertools import product", "in m1_] pool=Pool(4) conv4=pool.map(partial(conv_layer,kernel=k4),m3) pool.close() pool.join() conv4[conv4<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv4) pool.close() pool.join() m4=[i[0] for", "pool.join() m7=[i[0] for i in m1_] pos7=[i[1]for i in m1_] u7=[i[2]for i in", "m1_] pos4=[i[1]for i in m1_] u4=[i[2]for i in m1_] r4=[i[3]for i in m1_]", "m1_] pos5=[i[1]for i in m1_] u5=[i[2]for i in m1_] r5=[i[3]for i in m1_]", "x in process: x.join() for i in data: print(i.shape) return data def square(x):", "data=data.reshape(data.shape[0],data.shape[1]).T weights=np.zeros((len(data),len(output))) model=sklearn.linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') model.fit(data,output) y1=model.predict(x1) y2=model.predict(x2) y3=model.predict(x3) 
#for i in range(epoch): # weights=update_weights(data,output,weights,learning_rate)", "conv5=pool.map(partial(conv_layer,kernel=k5),m4) pool.close() pool.join() conv5[conv5<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv5) pool.close() pool.join() m5=[i[0] for i in m1_]", "* import multiprocessing from multiprocessing import Pool from itertools import product from numba", "#print(np.linalg.norm(predicted-output)) weights=weights-learning_rate*(((output-predicted)@features.T).T) return weights def Adam(features,output,weights,lr,t,beta1=0.9,beta2=0.999,epsilon=1e-08): #print(features.shape) #print(output.shape) #print(weights) #print(type(weights)) predicted=predict(features,weights) g=(-(output-predicted)@features.T).T m=np.zeros(weights.shape)", "m2=[i[0] for i in m1_] pos2=[i[1]for i in m1_] u2=[i[2]for i in m1_]", "pool.join() m3=[i[0] for i in m1_] pos3=[i[1]for i in m1_] u3=[i[2]for i in", "pool.join() m1=[i[0] for i in m1_] pos1=[i[1]for i in m1_] u1=[i[2]for i in", "pool=Pool(4) conv7=pool.map(partial(conv_layer,kernel=k7),m6) pool.close() pool.join() conv7[conv7<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv7) pool.close() pool.join() m7=[i[0] for i in", "k8=np.random.rand(3,3) pool=Pool(4) conv1=pool.map(partial(conv_layer,kernel=k1),data) pool.close() pool.join() conv1[conv1<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv1) pool.close() pool.join() m1=[i[0] for i", "m1_] pos1=[i[1]for i in m1_] u1=[i[2]for i in m1_] r1=[i[3]for i in m1_]", "conv6=pool.map(partial(conv_layer,kernel=k6),m5) pool.close() pool.join() conv6[conv6<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv6) pool.close() pool.join() m6=[i[0] for i in m1_]", "= 0) def predict(X,weights): return softmax(weights.T@X) def cross_entropy(y_hat, y): return - np.log(y_hat[range(len(y_hat)), y])", "for i in data: print(i.shape) return data def square(x): return x**2 def parallel_2(data,reshape_dim):", "for filename in os.listdir(folder): path=folder+\"\\\\\"+filename pictures = load_images_from_folder(path) for pics in pictures: images.append(pics)", "pool=Pool(4) m1_=pool.map(max_pooling_,conv1) pool.close() pool.join() m8=[i[0] for i in m1_] pos8=[i[1]for i in m1_]", "i in m1_] r7=[i[3]for i in m1_] pool=Pool(4) conv8=pool.map(partial(conv_layer,kernel=k8),m7) pool.close() pool.join() conv8[conv8<=0]=0 pool=Pool(4)", "k4=np.random.rand(3,3) k5=np.random.rand(3,3) k6=np.random.rand(3,3) k7=np.random.rand(3,3) k8=np.random.rand(3,3) pool=Pool(4) conv1=pool.map(partial(conv_layer,kernel=k1),data) pool.close() pool.join() conv1[conv1<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv1) pool.close()", "predicted=predict(features,weights) g=(-(output-predicted)@features.T).T m=np.zeros(weights.shape) v=np.zeros(weights.shape) m=beta1*m+(1-beta1)*g v=beta2*v+(1-beta2)*(g*g) m_hat=m/(1-(beta1**(t+1))) v_hat=v/(1-(beta2**(t+1))) #print(m_hat,v_hat) #print(type(((lr*m_hat)/(np.sqrt(v_hat)+epsilon)).T)) weights=weights-((lr*m_hat)/(np.sqrt(v_hat)+epsilon)) return weights", "pool=Pool(4) m1_=pool.map(max_pooling_,conv3) pool.close() pool.join() m3=[i[0] for i in m1_] pos3=[i[1]for i in m1_]", "i in m1_] r6=[i[3]for i in m1_] pool=Pool(4) conv7=pool.map(partial(conv_layer,kernel=k7),m6) pool.close() pool.join() conv7[conv7<=0]=0 pool=Pool(4)", "pool.close() pool.join() m7=[i[0] for i in m1_] pos7=[i[1]for i in m1_] u7=[i[2]for i", "#for n in np.random.permutation(len(output)): weights=Adam(data_hat,output_hat,weights,learning_rate,i) #if np.linalg.norm(weights-pre_weights)<0.0001: # print(i) # break\"\"\" return weights", 
"#print(weights) #print(type(weights)) predicted=predict(features,weights) g=(-(output-predicted)@features.T).T m=np.zeros(weights.shape) v=np.zeros(weights.shape) m=beta1*m+(1-beta1)*g v=beta2*v+(1-beta2)*(g*g) m_hat=m/(1-(beta1**(t+1))) v_hat=v/(1-(beta2**(t+1))) #print(m_hat,v_hat) #print(type(((lr*m_hat)/(np.sqrt(v_hat)+epsilon)).T)) weights=weights-((lr*m_hat)/(np.sqrt(v_hat)+epsilon))", "filename in os.listdir(folder): images.append(cv2.imread(os.path.join(folder,filename),0)) return images def load_data(folder): images=[] n=len(os.listdir(folder)) #print(n) output=[] iters", "import njit from functools import partial import math import sklearn from sklearn import", "def cross_entropy(y_hat, y): return - np.log(y_hat[range(len(y_hat)), y]) def update_weights(features,output,weights,learning_rate): predicted=predict(features,weights) print(features.shape) print(weights.shape) print(predicted.shape)", "output.append(y) iters += 1 return images,output def convert(l): return (*l,) def data_preprocessing(data,reshape_dim): for", "pictures = load_images_from_folder(path) for pics in pictures: images.append(pics) y=np.zeros((n,1)) y[iters,:] =1 y.reshape(1,n) output.append(y)", "m1_] u1=[i[2]for i in m1_] r1=[i[3]for i in m1_] pool=Pool(4) conv2=pool.map(partial(conv_layer,kernel=k2),m1) pool.close() pool.join()", "pool.join() conv3[conv3<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv3) pool.close() pool.join() m3=[i[0] for i in m1_] pos3=[i[1]for i", "in m1_] r3=[i[3]for i in m1_] pool=Pool(4) conv4=pool.map(partial(conv_layer,kernel=k4),m3) pool.close() pool.join() conv4[conv4<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv4)", "import os from conv import * import multiprocessing from multiprocessing import Pool from", "pool.join() conv8[conv8<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv1) pool.close() pool.join() m8=[i[0] for i in m1_] pos8=[i[1]for i", "softmax_regression(data,output,learning_rate,epoch): data_hat=np.array(data) data_hat=data_hat.reshape(data_hat.shape[0],data_hat.shape[1]).T output_hat=np.array(output) output_hat=output_hat.reshape(output_hat.shape[0],output_hat.shape[1]).T pre_weights=0 weights=np.zeros((len(data[0]),len(output[0]))) model=linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') \"\"\"for i in range(epoch): predicted=predict(data_hat,weights)", "numba import njit from functools import partial import math import sklearn from sklearn", "convert(l): return (*l,) def data_preprocessing(data,reshape_dim): for i in range(0,len(data)): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1) return data", "in m1_] u3=[i[2]for i in m1_] r3=[i[3]for i in m1_] pool=Pool(4) conv4=pool.map(partial(conv_layer,kernel=k4),m3) pool.close()", "i in m1_] r8=[i[3]for i in m1_] def train(folder,reshape_dim,learning_rate,epoch): data,output=load_data(folder) #data=[1,2,3,4,5,6,7,8,9,10,11,12,13] #print(output) #print(output[0].shape)", "images = [] for filename in os.listdir(folder): images.append(cv2.imread(os.path.join(folder,filename),0)) return images def load_data(folder): images=[]", "x def softmax(Z): e_Z = np.exp(Z - np.max(Z, axis = 0, keepdims =", "m1_] r6=[i[3]for i in m1_] pool=Pool(4) conv7=pool.map(partial(conv_layer,kernel=k7),m6) pool.close() pool.join() conv7[conv7<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv7) pool.close()", "square(x): return x**2 def parallel_2(data,reshape_dim): x=0 pool=Pool(4) x=pool.map(prepare_2,data) print(x) pool.close() 
pool.join() return x", "print(i.shape) return data def square(x): return x**2 def parallel_2(data,reshape_dim): x=0 pool=Pool(4) x=pool.map(prepare_2,data) print(x)", "pool=Pool(4) m1_=pool.map(max_pooling_,conv6) pool.close() pool.join() m6=[i[0] for i in m1_] pos6=[i[1]for i in m1_]", "np import cv2 import os from conv import * import multiprocessing from multiprocessing", "images.append(cv2.imread(os.path.join(folder,filename),0)) return images def load_data(folder): images=[] n=len(os.listdir(folder)) #print(n) output=[] iters = 0 for", "for i in range(0,len(data)): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1) return data def prepare(data,reshape_dim,i): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1) def", "os.listdir(folder): path=folder+\"\\\\\"+filename pictures = load_images_from_folder(path) for pics in pictures: images.append(pics) y=np.zeros((n,1)) y[iters,:] =1", "return data def square(x): return x**2 def parallel_2(data,reshape_dim): x=0 pool=Pool(4) x=pool.map(prepare_2,data) print(x) pool.close()", "m1_] pool=Pool(4) conv4=pool.map(partial(conv_layer,kernel=k4),m3) pool.close() pool.join() conv4[conv4<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv4) pool.close() pool.join() m4=[i[0] for i", "m1_] r1=[i[3]for i in m1_] pool=Pool(4) conv2=pool.map(partial(conv_layer,kernel=k2),m1) pool.close() pool.join() conv2[conv2<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv2) pool.close()", "r1=[i[3]for i in m1_] pool=Pool(4) conv2=pool.map(partial(conv_layer,kernel=k2),m1) pool.close() pool.join() conv2[conv2<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv2) pool.close() pool.join()", "in m1_] pool=Pool(4) conv7=pool.map(partial(conv_layer,kernel=k7),m6) pool.close() pool.join() conv7[conv7<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv7) pool.close() pool.join() m7=[i[0] for", "in process: x.start() for x in process: x.join() for i in data: print(i.shape)", "r6=[i[3]for i in m1_] pool=Pool(4) conv7=pool.map(partial(conv_layer,kernel=k7),m6) pool.close() pool.join() conv7[conv7<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv7) pool.close() pool.join()", "m4=[i[0] for i in m1_] pos4=[i[1]for i in m1_] u4=[i[2]for i in m1_]", "in m1_] def train(folder,reshape_dim,learning_rate,epoch): data,output=load_data(folder) #data=[1,2,3,4,5,6,7,8,9,10,11,12,13] #print(output) #print(output[0].shape) #print(data[0].shape) #print(data[1]) data=parallel_2(data,reshape_dim) weights=softmax_regression(data,output,learning_rate,epoch) return", "in process: x.join() for i in data: print(i.shape) return data def square(x): return", "from functools import partial import math import sklearn from sklearn import linear_model def", "#print(type(weights)) predicted=predict(features,weights) g=(-(output-predicted)@features.T).T m=np.zeros(weights.shape) v=np.zeros(weights.shape) m=beta1*m+(1-beta1)*g v=beta2*v+(1-beta2)*(g*g) m_hat=m/(1-(beta1**(t+1))) v_hat=v/(1-(beta2**(t+1))) #print(m_hat,v_hat) #print(type(((lr*m_hat)/(np.sqrt(v_hat)+epsilon)).T)) weights=weights-((lr*m_hat)/(np.sqrt(v_hat)+epsilon)) return", "m8=[i[0] for i in m1_] pos8=[i[1]for i in m1_] u8=[i[2]for i in m1_]", "def square(x): return x**2 def parallel_2(data,reshape_dim): x=0 pool=Pool(4) x=pool.map(prepare_2,data) print(x) pool.close() pool.join() return", "m1_] r3=[i[3]for i in m1_] pool=Pool(4) conv4=pool.map(partial(conv_layer,kernel=k4),m3) pool.close() pool.join() 
conv4[conv4<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv4) pool.close()", "def parallel(data,reshape_dim): process=[] for i in range(len(data)): p=multiprocessing.Process(target=prepare,args=(data,reshape_dim,i)) process.append(p) for x in process:", "data=ConvNet(cv2.resize(data/255,(256,256),interpolation=cv2.INTER_AREA)) data=data.reshape(data.size,1) return data def parallel(data,reshape_dim): process=[] for i in range(len(data)): p=multiprocessing.Process(target=prepare,args=(data,reshape_dim,i)) process.append(p)", "conv8[conv8<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv1) pool.close() pool.join() m8=[i[0] for i in m1_] pos8=[i[1]for i in", "pool=Pool(4) m1_=pool.map(max_pooling_,conv7) pool.close() pool.join() m7=[i[0] for i in m1_] pos7=[i[1]for i in m1_]", "conv8=pool.map(partial(conv_layer,kernel=k8),m7) pool.close() pool.join() conv8[conv8<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv1) pool.close() pool.join() m8=[i[0] for i in m1_]", "conv4=pool.map(partial(conv_layer,kernel=k4),m3) pool.close() pool.join() conv4[conv4<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv4) pool.close() pool.join() m4=[i[0] for i in m1_]", "itertools import product from numba import njit from functools import partial import math", "u8=[i[2]for i in m1_] r8=[i[3]for i in m1_] def train(folder,reshape_dim,learning_rate,epoch): data,output=load_data(folder) #data=[1,2,3,4,5,6,7,8,9,10,11,12,13] #print(output)", "pool.close() pool.join() conv5[conv5<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv5) pool.close() pool.join() m5=[i[0] for i in m1_] pos5=[i[1]for", "u4=[i[2]for i in m1_] r4=[i[3]for i in m1_] pool=Pool(4) conv5=pool.map(partial(conv_layer,kernel=k5),m4) pool.close() pool.join() conv5[conv5<=0]=0", "[] for filename in os.listdir(folder): images.append(cv2.imread(os.path.join(folder,filename),0)) return images def load_data(folder): images=[] n=len(os.listdir(folder)) #print(n)", "filename in os.listdir(folder): path=folder+\"\\\\\"+filename pictures = load_images_from_folder(path) for pics in pictures: images.append(pics) y=np.zeros((n,1))", "iters += 1 return images,output def convert(l): return (*l,) def data_preprocessing(data,reshape_dim): for i", "data=data.reshape(data.size,1) return data def parallel(data,reshape_dim): process=[] for i in range(len(data)): p=multiprocessing.Process(target=prepare,args=(data,reshape_dim,i)) process.append(p) for", "i in range(len(data)): p=multiprocessing.Process(target=prepare,args=(data,reshape_dim,i)) process.append(p) for x in process: x.start() for x in", "pre_weights=0 weights=np.zeros((len(data[0]),len(output[0]))) model=linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') \"\"\"for i in range(epoch): predicted=predict(data_hat,weights) print(np.linalg.norm(predicted-output_hat)) #for n in np.random.permutation(len(output)):", "import math import sklearn from sklearn import linear_model def load_images_from_folder(folder): images = []", "in m1_] pos1=[i[1]for i in m1_] u1=[i[2]for i in m1_] r1=[i[3]for i in", "m1_] r5=[i[3]for i in m1_] pool=Pool(4) conv6=pool.map(partial(conv_layer,kernel=k6),m5) pool.close() pool.join() conv6[conv6<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv6) pool.close()", "data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1) return data def prepare(data,reshape_dim,i): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1) def 
prepare_2(data): data=ConvNet(cv2.resize(data/255,(256,256),interpolation=cv2.INTER_AREA)) data=data.reshape(data.size,1) return", "= 0 for filename in os.listdir(folder): path=folder+\"\\\\\"+filename pictures = load_images_from_folder(path) for pics in", "i in m1_] u6=[i[2]for i in m1_] r6=[i[3]for i in m1_] pool=Pool(4) conv7=pool.map(partial(conv_layer,kernel=k7),m6)", "m1_=pool.map(max_pooling_,conv6) pool.close() pool.join() m6=[i[0] for i in m1_] pos6=[i[1]for i in m1_] u6=[i[2]for", "import cv2 import os from conv import * import multiprocessing from multiprocessing import", "product from numba import njit from functools import partial import math import sklearn", "from sklearn import linear_model def load_images_from_folder(folder): images = [] for filename in os.listdir(folder):", "conv5[conv5<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv5) pool.close() pool.join() m5=[i[0] for i in m1_] pos5=[i[1]for i in", "prepare_2(data): data=ConvNet(cv2.resize(data/255,(256,256),interpolation=cv2.INTER_AREA)) data=data.reshape(data.size,1) return data def parallel(data,reshape_dim): process=[] for i in range(len(data)): p=multiprocessing.Process(target=prepare,args=(data,reshape_dim,i))", "pool.join() m6=[i[0] for i in m1_] pos6=[i[1]for i in m1_] u6=[i[2]for i in", "= np.exp(Z - np.max(Z, axis = 0, keepdims = True,initial=-np.inf)) return e_Z /", "np.random.permutation(len(output)): weights=Adam(data_hat,output_hat,weights,learning_rate,i) #if np.linalg.norm(weights-pre_weights)<0.0001: # print(i) # break\"\"\" return weights def softmax_regression_2(data,output,x1,x2,x3): output=np.asarray(output)", "x=pool.map(prepare_2,data) print(x) pool.close() pool.join() return x def softmax(Z): e_Z = np.exp(Z - np.max(Z,", "output_hat=output_hat.reshape(output_hat.shape[0],output_hat.shape[1]).T pre_weights=0 weights=np.zeros((len(data[0]),len(output[0]))) model=linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') \"\"\"for i in range(epoch): predicted=predict(data_hat,weights) print(np.linalg.norm(predicted-output_hat)) #for n in", "i in range(epoch): predicted=predict(data_hat,weights) print(np.linalg.norm(predicted-output_hat)) #for n in np.random.permutation(len(output)): weights=Adam(data_hat,output_hat,weights,learning_rate,i) #if np.linalg.norm(weights-pre_weights)<0.0001: #", "i in range(epoch): # weights=update_weights(data,output,weights,learning_rate) return y1,y2,y3 def CNN(data,output,lr,epoch): k1=np.random.rand(3,3) k2=np.random.rand(3,3) k3=np.random.rand(3,3) k4=np.random.rand(3,3)", "pool.join() conv2[conv2<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv2) pool.close() pool.join() m2=[i[0] for i in m1_] pos2=[i[1]for i", "in m1_] pos5=[i[1]for i in m1_] u5=[i[2]for i in m1_] r5=[i[3]for i in", "numpy as np import cv2 import os from conv import * import multiprocessing", "conv4[conv4<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv4) pool.close() pool.join() m4=[i[0] for i in m1_] pos4=[i[1]for i in", "pool.close() pool.join() conv3[conv3<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv3) pool.close() pool.join() m3=[i[0] for i in m1_] pos3=[i[1]for", "pool.join() conv1[conv1<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv1) pool.close() pool.join() m1=[i[0] for i in m1_] pos1=[i[1]for i", "m1_] def train(folder,reshape_dim,learning_rate,epoch): data,output=load_data(folder) #data=[1,2,3,4,5,6,7,8,9,10,11,12,13] #print(output) #print(output[0].shape) #print(data[0].shape) #print(data[1]) data=parallel_2(data,reshape_dim) 
weights=softmax_regression(data,output,learning_rate,epoch) return weights", "pool.close() pool.join() m8=[i[0] for i in m1_] pos8=[i[1]for i in m1_] u8=[i[2]for i", "pos5=[i[1]for i in m1_] u5=[i[2]for i in m1_] r5=[i[3]for i in m1_] pool=Pool(4)", "for i in m1_] pos6=[i[1]for i in m1_] u6=[i[2]for i in m1_] r6=[i[3]for", "= True,initial=-np.inf)) return e_Z / e_Z.sum(axis = 0) def predict(X,weights): return softmax(weights.T@X) def", "i in m1_] u2=[i[2]for i in m1_] r2=[i[3]for i in m1_] pool=Pool(4) conv3=pool.map(partial(conv_layer,kernel=k3),m2)", "weights=update_weights(data,output,weights,learning_rate) return y1,y2,y3 def CNN(data,output,lr,epoch): k1=np.random.rand(3,3) k2=np.random.rand(3,3) k3=np.random.rand(3,3) k4=np.random.rand(3,3) k5=np.random.rand(3,3) k6=np.random.rand(3,3) k7=np.random.rand(3,3) k8=np.random.rand(3,3)", "in m1_] r5=[i[3]for i in m1_] pool=Pool(4) conv6=pool.map(partial(conv_layer,kernel=k6),m5) pool.close() pool.join() conv6[conv6<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv6)", "in m1_] u1=[i[2]for i in m1_] r1=[i[3]for i in m1_] pool=Pool(4) conv2=pool.map(partial(conv_layer,kernel=k2),m1) pool.close()", "pool.close() pool.join() conv1[conv1<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv1) pool.close() pool.join() m1=[i[0] for i in m1_] pos1=[i[1]for", "i in m1_] r4=[i[3]for i in m1_] pool=Pool(4) conv5=pool.map(partial(conv_layer,kernel=k5),m4) pool.close() pool.join() conv5[conv5<=0]=0 pool=Pool(4)", "from multiprocessing import Pool from itertools import product from numba import njit from", "k2=np.random.rand(3,3) k3=np.random.rand(3,3) k4=np.random.rand(3,3) k5=np.random.rand(3,3) k6=np.random.rand(3,3) k7=np.random.rand(3,3) k8=np.random.rand(3,3) pool=Pool(4) conv1=pool.map(partial(conv_layer,kernel=k1),data) pool.close() pool.join() conv1[conv1<=0]=0 pool=Pool(4)", "k1=np.random.rand(3,3) k2=np.random.rand(3,3) k3=np.random.rand(3,3) k4=np.random.rand(3,3) k5=np.random.rand(3,3) k6=np.random.rand(3,3) k7=np.random.rand(3,3) k8=np.random.rand(3,3) pool=Pool(4) conv1=pool.map(partial(conv_layer,kernel=k1),data) pool.close() pool.join() conv1[conv1<=0]=0", "m1_] u8=[i[2]for i in m1_] r8=[i[3]for i in m1_] def train(folder,reshape_dim,learning_rate,epoch): data,output=load_data(folder) #data=[1,2,3,4,5,6,7,8,9,10,11,12,13]", "in m1_] pos6=[i[1]for i in m1_] u6=[i[2]for i in m1_] r6=[i[3]for i in", "in data: print(i.shape) return data def square(x): return x**2 def parallel_2(data,reshape_dim): x=0 pool=Pool(4)", "Adam(features,output,weights,lr,t,beta1=0.9,beta2=0.999,epsilon=1e-08): #print(features.shape) #print(output.shape) #print(weights) #print(type(weights)) predicted=predict(features,weights) g=(-(output-predicted)@features.T).T m=np.zeros(weights.shape) v=np.zeros(weights.shape) m=beta1*m+(1-beta1)*g v=beta2*v+(1-beta2)*(g*g) m_hat=m/(1-(beta1**(t+1))) v_hat=v/(1-(beta2**(t+1)))", "pool=Pool(4) m1_=pool.map(max_pooling_,conv2) pool.close() pool.join() m2=[i[0] for i in m1_] pos2=[i[1]for i in m1_]", "= 0, keepdims = True,initial=-np.inf)) return e_Z / e_Z.sum(axis = 0) def predict(X,weights):", "conv3=pool.map(partial(conv_layer,kernel=k3),m2) pool.close() pool.join() conv3[conv3<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv3) pool.close() pool.join() m3=[i[0] for i in m1_]", "u5=[i[2]for i in m1_] r5=[i[3]for i in m1_] pool=Pool(4) conv6=pool.map(partial(conv_layer,kernel=k6),m5) pool.close() pool.join() conv6[conv6<=0]=0", "i in m1_] u3=[i[2]for i in m1_] r3=[i[3]for i in m1_] pool=Pool(4) 
conv4=pool.map(partial(conv_layer,kernel=k4),m3)", "njit from functools import partial import math import sklearn from sklearn import linear_model", "in m1_] pool=Pool(4) conv8=pool.map(partial(conv_layer,kernel=k8),m7) pool.close() pool.join() conv8[conv8<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv1) pool.close() pool.join() m8=[i[0] for", "#print(output.shape) #print(weights) #print(type(weights)) predicted=predict(features,weights) g=(-(output-predicted)@features.T).T m=np.zeros(weights.shape) v=np.zeros(weights.shape) m=beta1*m+(1-beta1)*g v=beta2*v+(1-beta2)*(g*g) m_hat=m/(1-(beta1**(t+1))) v_hat=v/(1-(beta2**(t+1))) #print(m_hat,v_hat) #print(type(((lr*m_hat)/(np.sqrt(v_hat)+epsilon)).T))", "r3=[i[3]for i in m1_] pool=Pool(4) conv4=pool.map(partial(conv_layer,kernel=k4),m3) pool.close() pool.join() conv4[conv4<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv4) pool.close() pool.join()", "m1_] u4=[i[2]for i in m1_] r4=[i[3]for i in m1_] pool=Pool(4) conv5=pool.map(partial(conv_layer,kernel=k5),m4) pool.close() pool.join()", "def prepare(data,reshape_dim,i): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1) def prepare_2(data): data=ConvNet(cv2.resize(data/255,(256,256),interpolation=cv2.INTER_AREA)) data=data.reshape(data.size,1) return data def parallel(data,reshape_dim): process=[]", "prepare(data,reshape_dim,i): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1) def prepare_2(data): data=ConvNet(cv2.resize(data/255,(256,256),interpolation=cv2.INTER_AREA)) data=data.reshape(data.size,1) return data def parallel(data,reshape_dim): process=[] for", "weights=Adam(data_hat,output_hat,weights,learning_rate,i) #if np.linalg.norm(weights-pre_weights)<0.0001: # print(i) # break\"\"\" return weights def softmax_regression_2(data,output,x1,x2,x3): output=np.asarray(output) output=output.reshape(output.shape[0],output.shape[1]).T", "x.join() for i in data: print(i.shape) return data def square(x): return x**2 def", "in m1_] pool=Pool(4) conv2=pool.map(partial(conv_layer,kernel=k2),m1) pool.close() pool.join() conv2[conv2<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv2) pool.close() pool.join() m2=[i[0] for", "i in m1_] r2=[i[3]for i in m1_] pool=Pool(4) conv3=pool.map(partial(conv_layer,kernel=k3),m2) pool.close() pool.join() conv3[conv3<=0]=0 pool=Pool(4)", "functools import partial import math import sklearn from sklearn import linear_model def load_images_from_folder(folder):", "pos3=[i[1]for i in m1_] u3=[i[2]for i in m1_] r3=[i[3]for i in m1_] pool=Pool(4)", "os.listdir(folder): images.append(cv2.imread(os.path.join(folder,filename),0)) return images def load_data(folder): images=[] n=len(os.listdir(folder)) #print(n) output=[] iters = 0", "data_preprocessing(data,reshape_dim): for i in range(0,len(data)): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1) return data def prepare(data,reshape_dim,i): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1)", "for i in m1_] pos5=[i[1]for i in m1_] u5=[i[2]for i in m1_] r5=[i[3]for", "output=np.asarray(output) output=output.reshape(output.shape[0],output.shape[1]).T output=output.reshape(-1) data=np.asarray(data) data=data.reshape(data.shape[0],data.shape[1]).T weights=np.zeros((len(data),len(output))) 
model=sklearn.linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') model.fit(data,output) y1=model.predict(x1) y2=model.predict(x2) y3=model.predict(x3) #for i", "m1_] pos8=[i[1]for i in m1_] u8=[i[2]for i in m1_] r8=[i[3]for i in m1_]", "y.reshape(1,n) output.append(y) iters += 1 return images,output def convert(l): return (*l,) def data_preprocessing(data,reshape_dim):", "m1_] u7=[i[2]for i in m1_] r7=[i[3]for i in m1_] pool=Pool(4) conv8=pool.map(partial(conv_layer,kernel=k8),m7) pool.close() pool.join()", "data_hat=data_hat.reshape(data_hat.shape[0],data_hat.shape[1]).T output_hat=np.array(output) output_hat=output_hat.reshape(output_hat.shape[0],output_hat.shape[1]).T pre_weights=0 weights=np.zeros((len(data[0]),len(output[0]))) model=linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') \"\"\"for i in range(epoch): predicted=predict(data_hat,weights) print(np.linalg.norm(predicted-output_hat)) #for", "weights=weights-learning_rate*(((output-predicted)@features.T).T) return weights def Adam(features,output,weights,lr,t,beta1=0.9,beta2=0.999,epsilon=1e-08): #print(features.shape) #print(output.shape) #print(weights) #print(type(weights)) predicted=predict(features,weights) g=(-(output-predicted)@features.T).T m=np.zeros(weights.shape) v=np.zeros(weights.shape)", "m5=[i[0] for i in m1_] pos5=[i[1]for i in m1_] u5=[i[2]for i in m1_]", "i in m1_] u7=[i[2]for i in m1_] r7=[i[3]for i in m1_] pool=Pool(4) conv8=pool.map(partial(conv_layer,kernel=k8),m7)", "#for i in range(epoch): # weights=update_weights(data,output,weights,learning_rate) return y1,y2,y3 def CNN(data,output,lr,epoch): k1=np.random.rand(3,3) k2=np.random.rand(3,3) k3=np.random.rand(3,3)", "partial import math import sklearn from sklearn import linear_model def load_images_from_folder(folder): images =", "for pics in pictures: images.append(pics) y=np.zeros((n,1)) y[iters,:] =1 y.reshape(1,n) output.append(y) iters += 1", "m1_] pool=Pool(4) conv7=pool.map(partial(conv_layer,kernel=k7),m6) pool.close() pool.join() conv7[conv7<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv7) pool.close() pool.join() m7=[i[0] for i", "math import sklearn from sklearn import linear_model def load_images_from_folder(folder): images = [] for", "i in m1_] u5=[i[2]for i in m1_] r5=[i[3]for i in m1_] pool=Pool(4) conv6=pool.map(partial(conv_layer,kernel=k6),m5)", "data[i]=data[i].reshape(data[i].size,1) def prepare_2(data): data=ConvNet(cv2.resize(data/255,(256,256),interpolation=cv2.INTER_AREA)) data=data.reshape(data.size,1) return data def parallel(data,reshape_dim): process=[] for i in", "m1_] pool=Pool(4) conv6=pool.map(partial(conv_layer,kernel=k6),m5) pool.close() pool.join() conv6[conv6<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv6) pool.close() pool.join() m6=[i[0] for i", "import linear_model def load_images_from_folder(folder): images = [] for filename in os.listdir(folder): images.append(cv2.imread(os.path.join(folder,filename),0)) return", "#data=[1,2,3,4,5,6,7,8,9,10,11,12,13] #print(output) #print(output[0].shape) #print(data[0].shape) #print(data[1]) data=parallel_2(data,reshape_dim) weights=softmax_regression(data,output,learning_rate,epoch) return weights def train_with_sklearn(folder,reshape_dim,x1,x2,x3): data,output=load_data(folder) data=parallel_2(data,reshape_dim)", "n in np.random.permutation(len(output)): weights=Adam(data_hat,output_hat,weights,learning_rate,i) #if np.linalg.norm(weights-pre_weights)<0.0001: # print(i) # break\"\"\" return weights def", "x=0 
pool=Pool(4) x=pool.map(prepare_2,data) print(x) pool.close() pool.join() return x def softmax(Z): e_Z = np.exp(Z", "for i in m1_] pos2=[i[1]for i in m1_] u2=[i[2]for i in m1_] r2=[i[3]for", "pool=Pool(4) conv2=pool.map(partial(conv_layer,kernel=k2),m1) pool.close() pool.join() conv2[conv2<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv2) pool.close() pool.join() m2=[i[0] for i in", "data: print(i.shape) return data def square(x): return x**2 def parallel_2(data,reshape_dim): x=0 pool=Pool(4) x=pool.map(prepare_2,data)", "#print(output[0].shape) #print(data[0].shape) #print(data[1]) data=parallel_2(data,reshape_dim) weights=softmax_regression(data,output,learning_rate,epoch) return weights def train_with_sklearn(folder,reshape_dim,x1,x2,x3): data,output=load_data(folder) data=parallel_2(data,reshape_dim) y1,y2,y3=softmax_regression_2(data,output,x1,x2,x3) return", "True,initial=-np.inf)) return e_Z / e_Z.sum(axis = 0) def predict(X,weights): return softmax(weights.T@X) def cross_entropy(y_hat,", "i in m1_] pool=Pool(4) conv4=pool.map(partial(conv_layer,kernel=k4),m3) pool.close() pool.join() conv4[conv4<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv4) pool.close() pool.join() m4=[i[0]", "i in m1_] u8=[i[2]for i in m1_] r8=[i[3]for i in m1_] def train(folder,reshape_dim,learning_rate,epoch):", "range(epoch): predicted=predict(data_hat,weights) print(np.linalg.norm(predicted-output_hat)) #for n in np.random.permutation(len(output)): weights=Adam(data_hat,output_hat,weights,learning_rate,i) #if np.linalg.norm(weights-pre_weights)<0.0001: # print(i) #", "output=output.reshape(output.shape[0],output.shape[1]).T output=output.reshape(-1) data=np.asarray(data) data=data.reshape(data.shape[0],data.shape[1]).T weights=np.zeros((len(data),len(output))) model=sklearn.linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') model.fit(data,output) y1=model.predict(x1) y2=model.predict(x2) y3=model.predict(x3) #for i in", "weights def Adam(features,output,weights,lr,t,beta1=0.9,beta2=0.999,epsilon=1e-08): #print(features.shape) #print(output.shape) #print(weights) #print(type(weights)) predicted=predict(features,weights) g=(-(output-predicted)@features.T).T m=np.zeros(weights.shape) v=np.zeros(weights.shape) m=beta1*m+(1-beta1)*g v=beta2*v+(1-beta2)*(g*g)", "m1_=pool.map(max_pooling_,conv5) pool.close() pool.join() m5=[i[0] for i in m1_] pos5=[i[1]for i in m1_] u5=[i[2]for", "i in m1_] pos5=[i[1]for i in m1_] u5=[i[2]for i in m1_] r5=[i[3]for i", "output_hat=np.array(output) output_hat=output_hat.reshape(output_hat.shape[0],output_hat.shape[1]).T pre_weights=0 weights=np.zeros((len(data[0]),len(output[0]))) model=linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') \"\"\"for i in range(epoch): predicted=predict(data_hat,weights) print(np.linalg.norm(predicted-output_hat)) #for n", "conv7=pool.map(partial(conv_layer,kernel=k7),m6) pool.close() pool.join() conv7[conv7<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv7) pool.close() pool.join() m7=[i[0] for i in m1_]", "m1_=pool.map(max_pooling_,conv7) pool.close() pool.join() m7=[i[0] for i in m1_] pos7=[i[1]for i in m1_] u7=[i[2]for", "i in m1_] pool=Pool(4) conv7=pool.map(partial(conv_layer,kernel=k7),m6) pool.close() pool.join() conv7[conv7<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv7) pool.close() pool.join() m7=[i[0]", "#if np.linalg.norm(weights-pre_weights)<0.0001: # print(i) # break\"\"\" return weights def softmax_regression_2(data,output,x1,x2,x3): output=np.asarray(output) 
output=output.reshape(output.shape[0],output.shape[1]).T output=output.reshape(-1)", "import * import multiprocessing from multiprocessing import Pool from itertools import product from", "i in m1_] r5=[i[3]for i in m1_] pool=Pool(4) conv6=pool.map(partial(conv_layer,kernel=k6),m5) pool.close() pool.join() conv6[conv6<=0]=0 pool=Pool(4)", "cv2 import os from conv import * import multiprocessing from multiprocessing import Pool", "pool.close() pool.join() conv4[conv4<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv4) pool.close() pool.join() m4=[i[0] for i in m1_] pos4=[i[1]for", "data def parallel(data,reshape_dim): process=[] for i in range(len(data)): p=multiprocessing.Process(target=prepare,args=(data,reshape_dim,i)) process.append(p) for x in", "process: x.start() for x in process: x.join() for i in data: print(i.shape) return", "k5=np.random.rand(3,3) k6=np.random.rand(3,3) k7=np.random.rand(3,3) k8=np.random.rand(3,3) pool=Pool(4) conv1=pool.map(partial(conv_layer,kernel=k1),data) pool.close() pool.join() conv1[conv1<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv1) pool.close() pool.join()", "keepdims = True,initial=-np.inf)) return e_Z / e_Z.sum(axis = 0) def predict(X,weights): return softmax(weights.T@X)", "v=np.zeros(weights.shape) m=beta1*m+(1-beta1)*g v=beta2*v+(1-beta2)*(g*g) m_hat=m/(1-(beta1**(t+1))) v_hat=v/(1-(beta2**(t+1))) #print(m_hat,v_hat) #print(type(((lr*m_hat)/(np.sqrt(v_hat)+epsilon)).T)) weights=weights-((lr*m_hat)/(np.sqrt(v_hat)+epsilon)) return weights def softmax_regression(data,output,learning_rate,epoch): data_hat=np.array(data)", "weights def softmax_regression(data,output,learning_rate,epoch): data_hat=np.array(data) data_hat=data_hat.reshape(data_hat.shape[0],data_hat.shape[1]).T output_hat=np.array(output) output_hat=output_hat.reshape(output_hat.shape[0],output_hat.shape[1]).T pre_weights=0 weights=np.zeros((len(data[0]),len(output[0]))) model=linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') \"\"\"for i in", "in os.listdir(folder): path=folder+\"\\\\\"+filename pictures = load_images_from_folder(path) for pics in pictures: images.append(pics) y=np.zeros((n,1)) y[iters,:]", "m=beta1*m+(1-beta1)*g v=beta2*v+(1-beta2)*(g*g) m_hat=m/(1-(beta1**(t+1))) v_hat=v/(1-(beta2**(t+1))) #print(m_hat,v_hat) #print(type(((lr*m_hat)/(np.sqrt(v_hat)+epsilon)).T)) weights=weights-((lr*m_hat)/(np.sqrt(v_hat)+epsilon)) return weights def softmax_regression(data,output,learning_rate,epoch): data_hat=np.array(data) data_hat=data_hat.reshape(data_hat.shape[0],data_hat.shape[1]).T", "conv3[conv3<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv3) pool.close() pool.join() m3=[i[0] for i in m1_] pos3=[i[1]for i in", "m_hat=m/(1-(beta1**(t+1))) v_hat=v/(1-(beta2**(t+1))) #print(m_hat,v_hat) #print(type(((lr*m_hat)/(np.sqrt(v_hat)+epsilon)).T)) weights=weights-((lr*m_hat)/(np.sqrt(v_hat)+epsilon)) return weights def softmax_regression(data,output,learning_rate,epoch): data_hat=np.array(data) data_hat=data_hat.reshape(data_hat.shape[0],data_hat.shape[1]).T output_hat=np.array(output) output_hat=output_hat.reshape(output_hat.shape[0],output_hat.shape[1]).T", "r5=[i[3]for i in m1_] pool=Pool(4) conv6=pool.map(partial(conv_layer,kernel=k6),m5) pool.close() pool.join() conv6[conv6<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv6) pool.close() pool.join()", "return weights def Adam(features,output,weights,lr,t,beta1=0.9,beta2=0.999,epsilon=1e-08): #print(features.shape) #print(output.shape) #print(weights) #print(type(weights)) 
predicted=predict(features,weights) g=(-(output-predicted)@features.T).T m=np.zeros(weights.shape) v=np.zeros(weights.shape) m=beta1*m+(1-beta1)*g", "axis = 0, keepdims = True,initial=-np.inf)) return e_Z / e_Z.sum(axis = 0) def", "print(predicted.shape) #print(np.linalg.norm(predicted-output)) weights=weights-learning_rate*(((output-predicted)@features.T).T) return weights def Adam(features,output,weights,lr,t,beta1=0.9,beta2=0.999,epsilon=1e-08): #print(features.shape) #print(output.shape) #print(weights) #print(type(weights)) predicted=predict(features,weights) g=(-(output-predicted)@features.T).T", "pool=Pool(4) m1_=pool.map(max_pooling_,conv4) pool.close() pool.join() m4=[i[0] for i in m1_] pos4=[i[1]for i in m1_]", "v=beta2*v+(1-beta2)*(g*g) m_hat=m/(1-(beta1**(t+1))) v_hat=v/(1-(beta2**(t+1))) #print(m_hat,v_hat) #print(type(((lr*m_hat)/(np.sqrt(v_hat)+epsilon)).T)) weights=weights-((lr*m_hat)/(np.sqrt(v_hat)+epsilon)) return weights def softmax_regression(data,output,learning_rate,epoch): data_hat=np.array(data) data_hat=data_hat.reshape(data_hat.shape[0],data_hat.shape[1]).T output_hat=np.array(output)", "def train(folder,reshape_dim,learning_rate,epoch): data,output=load_data(folder) #data=[1,2,3,4,5,6,7,8,9,10,11,12,13] #print(output) #print(output[0].shape) #print(data[0].shape) #print(data[1]) data=parallel_2(data,reshape_dim) weights=softmax_regression(data,output,learning_rate,epoch) return weights def", "weights=weights-((lr*m_hat)/(np.sqrt(v_hat)+epsilon)) return weights def softmax_regression(data,output,learning_rate,epoch): data_hat=np.array(data) data_hat=data_hat.reshape(data_hat.shape[0],data_hat.shape[1]).T output_hat=np.array(output) output_hat=output_hat.reshape(output_hat.shape[0],output_hat.shape[1]).T pre_weights=0 weights=np.zeros((len(data[0]),len(output[0]))) model=linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') \"\"\"for", "pool.close() pool.join() m5=[i[0] for i in m1_] pos5=[i[1]for i in m1_] u5=[i[2]for i", "print(np.linalg.norm(predicted-output_hat)) #for n in np.random.permutation(len(output)): weights=Adam(data_hat,output_hat,weights,learning_rate,i) #if np.linalg.norm(weights-pre_weights)<0.0001: # print(i) # break\"\"\" return", "i in data: print(i.shape) return data def square(x): return x**2 def parallel_2(data,reshape_dim): x=0", "m1_] pool=Pool(4) conv3=pool.map(partial(conv_layer,kernel=k3),m2) pool.close() pool.join() conv3[conv3<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv3) pool.close() pool.join() m3=[i[0] for i", "in pictures: images.append(pics) y=np.zeros((n,1)) y[iters,:] =1 y.reshape(1,n) output.append(y) iters += 1 return images,output", "i in m1_] pool=Pool(4) conv5=pool.map(partial(conv_layer,kernel=k5),m4) pool.close() pool.join() conv5[conv5<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv5) pool.close() pool.join() m5=[i[0]", "in m1_] u7=[i[2]for i in m1_] r7=[i[3]for i in m1_] pool=Pool(4) conv8=pool.map(partial(conv_layer,kernel=k8),m7) pool.close()", "sklearn import linear_model def load_images_from_folder(folder): images = [] for filename in os.listdir(folder): images.append(cv2.imread(os.path.join(folder,filename),0))", "m1_=pool.map(max_pooling_,conv2) pool.close() pool.join() m2=[i[0] for i in m1_] pos2=[i[1]for i in m1_] u2=[i[2]for", "def parallel_2(data,reshape_dim): x=0 pool=Pool(4) x=pool.map(prepare_2,data) print(x) pool.close() pool.join() return x def softmax(Z): e_Z", "pool.join() conv5[conv5<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv5) pool.close() pool.join() m5=[i[0] 
for i in m1_] pos5=[i[1]for i", "conv2[conv2<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv2) pool.close() pool.join() m2=[i[0] for i in m1_] pos2=[i[1]for i in", "load_images_from_folder(folder): images = [] for filename in os.listdir(folder): images.append(cv2.imread(os.path.join(folder,filename),0)) return images def load_data(folder):", "output=[] iters = 0 for filename in os.listdir(folder): path=folder+\"\\\\\"+filename pictures = load_images_from_folder(path) for", "for i in m1_] pos4=[i[1]for i in m1_] u4=[i[2]for i in m1_] r4=[i[3]for", "return softmax(weights.T@X) def cross_entropy(y_hat, y): return - np.log(y_hat[range(len(y_hat)), y]) def update_weights(features,output,weights,learning_rate): predicted=predict(features,weights) print(features.shape)", "conv7[conv7<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv7) pool.close() pool.join() m7=[i[0] for i in m1_] pos7=[i[1]for i in", "return weights def softmax_regression_2(data,output,x1,x2,x3): output=np.asarray(output) output=output.reshape(output.shape[0],output.shape[1]).T output=output.reshape(-1) data=np.asarray(data) data=data.reshape(data.shape[0],data.shape[1]).T weights=np.zeros((len(data),len(output))) model=sklearn.linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') model.fit(data,output) y1=model.predict(x1)", "# break\"\"\" return weights def softmax_regression_2(data,output,x1,x2,x3): output=np.asarray(output) output=output.reshape(output.shape[0],output.shape[1]).T output=output.reshape(-1) data=np.asarray(data) data=data.reshape(data.shape[0],data.shape[1]).T weights=np.zeros((len(data),len(output))) model=sklearn.linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial')", "conv2=pool.map(partial(conv_layer,kernel=k2),m1) pool.close() pool.join() conv2[conv2<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv2) pool.close() pool.join() m2=[i[0] for i in m1_]", "m1_=pool.map(max_pooling_,conv1) pool.close() pool.join() m8=[i[0] for i in m1_] pos8=[i[1]for i in m1_] u8=[i[2]for", "np.exp(Z - np.max(Z, axis = 0, keepdims = True,initial=-np.inf)) return e_Z / e_Z.sum(axis", "predict(X,weights): return softmax(weights.T@X) def cross_entropy(y_hat, y): return - np.log(y_hat[range(len(y_hat)), y]) def update_weights(features,output,weights,learning_rate): predicted=predict(features,weights)", "g=(-(output-predicted)@features.T).T m=np.zeros(weights.shape) v=np.zeros(weights.shape) m=beta1*m+(1-beta1)*g v=beta2*v+(1-beta2)*(g*g) m_hat=m/(1-(beta1**(t+1))) v_hat=v/(1-(beta2**(t+1))) #print(m_hat,v_hat) #print(type(((lr*m_hat)/(np.sqrt(v_hat)+epsilon)).T)) weights=weights-((lr*m_hat)/(np.sqrt(v_hat)+epsilon)) return weights def", "in m1_] r7=[i[3]for i in m1_] pool=Pool(4) conv8=pool.map(partial(conv_layer,kernel=k8),m7) pool.close() pool.join() conv8[conv8<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv1)", "def CNN(data,output,lr,epoch): k1=np.random.rand(3,3) k2=np.random.rand(3,3) k3=np.random.rand(3,3) k4=np.random.rand(3,3) k5=np.random.rand(3,3) k6=np.random.rand(3,3) k7=np.random.rand(3,3) k8=np.random.rand(3,3) pool=Pool(4) conv1=pool.map(partial(conv_layer,kernel=k1),data) pool.close()", "pos2=[i[1]for i in m1_] u2=[i[2]for i in m1_] r2=[i[3]for i in m1_] pool=Pool(4)", "print(i) # break\"\"\" return weights def softmax_regression_2(data,output,x1,x2,x3): output=np.asarray(output) output=output.reshape(output.shape[0],output.shape[1]).T output=output.reshape(-1) data=np.asarray(data) data=data.reshape(data.shape[0],data.shape[1]).T 
weights=np.zeros((len(data),len(output)))", "pool.join() m8=[i[0] for i in m1_] pos8=[i[1]for i in m1_] u8=[i[2]for i in", "pool.close() pool.join() m4=[i[0] for i in m1_] pos4=[i[1]for i in m1_] u4=[i[2]for i", "import product from numba import njit from functools import partial import math import", "in m1_] pos3=[i[1]for i in m1_] u3=[i[2]for i in m1_] r3=[i[3]for i in", "data,output=load_data(folder) #data=[1,2,3,4,5,6,7,8,9,10,11,12,13] #print(output) #print(output[0].shape) #print(data[0].shape) #print(data[1]) data=parallel_2(data,reshape_dim) weights=softmax_regression(data,output,learning_rate,epoch) return weights def train_with_sklearn(folder,reshape_dim,x1,x2,x3): data,output=load_data(folder)", "return e_Z / e_Z.sum(axis = 0) def predict(X,weights): return softmax(weights.T@X) def cross_entropy(y_hat, y):", "p=multiprocessing.Process(target=prepare,args=(data,reshape_dim,i)) process.append(p) for x in process: x.start() for x in process: x.join() for", "break\"\"\" return weights def softmax_regression_2(data,output,x1,x2,x3): output=np.asarray(output) output=output.reshape(output.shape[0],output.shape[1]).T output=output.reshape(-1) data=np.asarray(data) data=data.reshape(data.shape[0],data.shape[1]).T weights=np.zeros((len(data),len(output))) model=sklearn.linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') model.fit(data,output)", "def load_data(folder): images=[] n=len(os.listdir(folder)) #print(n) output=[] iters = 0 for filename in os.listdir(folder):", "y): return - np.log(y_hat[range(len(y_hat)), y]) def update_weights(features,output,weights,learning_rate): predicted=predict(features,weights) print(features.shape) print(weights.shape) print(predicted.shape) #print(np.linalg.norm(predicted-output)) weights=weights-learning_rate*(((output-predicted)@features.T).T)", "k3=np.random.rand(3,3) k4=np.random.rand(3,3) k5=np.random.rand(3,3) k6=np.random.rand(3,3) k7=np.random.rand(3,3) k8=np.random.rand(3,3) pool=Pool(4) conv1=pool.map(partial(conv_layer,kernel=k1),data) pool.close() pool.join() conv1[conv1<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv1)", "def data_preprocessing(data,reshape_dim): for i in range(0,len(data)): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1) return data def prepare(data,reshape_dim,i): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA))", "def softmax(Z): e_Z = np.exp(Z - np.max(Z, axis = 0, keepdims = True,initial=-np.inf))", "pool=Pool(4) conv8=pool.map(partial(conv_layer,kernel=k8),m7) pool.close() pool.join() conv8[conv8<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv1) pool.close() pool.join() m8=[i[0] for i in", "images,output def convert(l): return (*l,) def data_preprocessing(data,reshape_dim): for i in range(0,len(data)): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1)", "data_hat=np.array(data) data_hat=data_hat.reshape(data_hat.shape[0],data_hat.shape[1]).T output_hat=np.array(output) output_hat=output_hat.reshape(output_hat.shape[0],output_hat.shape[1]).T pre_weights=0 weights=np.zeros((len(data[0]),len(output[0]))) model=linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') \"\"\"for i in range(epoch): predicted=predict(data_hat,weights) print(np.linalg.norm(predicted-output_hat))", "in range(epoch): predicted=predict(data_hat,weights) print(np.linalg.norm(predicted-output_hat)) #for n in 
np.random.permutation(len(output)): weights=Adam(data_hat,output_hat,weights,learning_rate,i) #if np.linalg.norm(weights-pre_weights)<0.0001: # print(i)", "np.log(y_hat[range(len(y_hat)), y]) def update_weights(features,output,weights,learning_rate): predicted=predict(features,weights) print(features.shape) print(weights.shape) print(predicted.shape) #print(np.linalg.norm(predicted-output)) weights=weights-learning_rate*(((output-predicted)@features.T).T) return weights def", "pool=Pool(4) conv5=pool.map(partial(conv_layer,kernel=k5),m4) pool.close() pool.join() conv5[conv5<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv5) pool.close() pool.join() m5=[i[0] for i in", "in m1_] pos2=[i[1]for i in m1_] u2=[i[2]for i in m1_] r2=[i[3]for i in", "= [] for filename in os.listdir(folder): images.append(cv2.imread(os.path.join(folder,filename),0)) return images def load_data(folder): images=[] n=len(os.listdir(folder))", "CNN(data,output,lr,epoch): k1=np.random.rand(3,3) k2=np.random.rand(3,3) k3=np.random.rand(3,3) k4=np.random.rand(3,3) k5=np.random.rand(3,3) k6=np.random.rand(3,3) k7=np.random.rand(3,3) k8=np.random.rand(3,3) pool=Pool(4) conv1=pool.map(partial(conv_layer,kernel=k1),data) pool.close() pool.join()", "pool.close() pool.join() return x def softmax(Z): e_Z = np.exp(Z - np.max(Z, axis =", "Pool from itertools import product from numba import njit from functools import partial", "r7=[i[3]for i in m1_] pool=Pool(4) conv8=pool.map(partial(conv_layer,kernel=k8),m7) pool.close() pool.join() conv8[conv8<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv1) pool.close() pool.join()", "r4=[i[3]for i in m1_] pool=Pool(4) conv5=pool.map(partial(conv_layer,kernel=k5),m4) pool.close() pool.join() conv5[conv5<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv5) pool.close() pool.join()", "def softmax_regression(data,output,learning_rate,epoch): data_hat=np.array(data) data_hat=data_hat.reshape(data_hat.shape[0],data_hat.shape[1]).T output_hat=np.array(output) output_hat=output_hat.reshape(output_hat.shape[0],output_hat.shape[1]).T pre_weights=0 weights=np.zeros((len(data[0]),len(output[0]))) model=linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') \"\"\"for i in range(epoch):", "e_Z = np.exp(Z - np.max(Z, axis = 0, keepdims = True,initial=-np.inf)) return e_Z", "y2=model.predict(x2) y3=model.predict(x3) #for i in range(epoch): # weights=update_weights(data,output,weights,learning_rate) return y1,y2,y3 def CNN(data,output,lr,epoch): k1=np.random.rand(3,3)", "for x in process: x.start() for x in process: x.join() for i in", "in os.listdir(folder): images.append(cv2.imread(os.path.join(folder,filename),0)) return images def load_data(folder): images=[] n=len(os.listdir(folder)) #print(n) output=[] iters =", "in m1_] r2=[i[3]for i in m1_] pool=Pool(4) conv3=pool.map(partial(conv_layer,kernel=k3),m2) pool.close() pool.join() conv3[conv3<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv3)", "v_hat=v/(1-(beta2**(t+1))) #print(m_hat,v_hat) #print(type(((lr*m_hat)/(np.sqrt(v_hat)+epsilon)).T)) weights=weights-((lr*m_hat)/(np.sqrt(v_hat)+epsilon)) return weights def softmax_regression(data,output,learning_rate,epoch): data_hat=np.array(data) data_hat=data_hat.reshape(data_hat.shape[0],data_hat.shape[1]).T output_hat=np.array(output) output_hat=output_hat.reshape(output_hat.shape[0],output_hat.shape[1]).T pre_weights=0", "for i in m1_] pos3=[i[1]for i in m1_] u3=[i[2]for i in m1_] r3=[i[3]for", "0, keepdims = True,initial=-np.inf)) return e_Z / e_Z.sum(axis = 0) def 
predict(X,weights): return", "m1_] pos3=[i[1]for i in m1_] u3=[i[2]for i in m1_] r3=[i[3]for i in m1_]", "pool.close() pool.join() conv2[conv2<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv2) pool.close() pool.join() m2=[i[0] for i in m1_] pos2=[i[1]for", "linear_model def load_images_from_folder(folder): images = [] for filename in os.listdir(folder): images.append(cv2.imread(os.path.join(folder,filename),0)) return images", "i in m1_] pos2=[i[1]for i in m1_] u2=[i[2]for i in m1_] r2=[i[3]for i", "images def load_data(folder): images=[] n=len(os.listdir(folder)) #print(n) output=[] iters = 0 for filename in", "for i in range(len(data)): p=multiprocessing.Process(target=prepare,args=(data,reshape_dim,i)) process.append(p) for x in process: x.start() for x", "y=np.zeros((n,1)) y[iters,:] =1 y.reshape(1,n) output.append(y) iters += 1 return images,output def convert(l): return", "def update_weights(features,output,weights,learning_rate): predicted=predict(features,weights) print(features.shape) print(weights.shape) print(predicted.shape) #print(np.linalg.norm(predicted-output)) weights=weights-learning_rate*(((output-predicted)@features.T).T) return weights def Adam(features,output,weights,lr,t,beta1=0.9,beta2=0.999,epsilon=1e-08): #print(features.shape)", "in np.random.permutation(len(output)): weights=Adam(data_hat,output_hat,weights,learning_rate,i) #if np.linalg.norm(weights-pre_weights)<0.0001: # print(i) # break\"\"\" return weights def softmax_regression_2(data,output,x1,x2,x3):", "in m1_] pos7=[i[1]for i in m1_] u7=[i[2]for i in m1_] r7=[i[3]for i in", "images.append(pics) y=np.zeros((n,1)) y[iters,:] =1 y.reshape(1,n) output.append(y) iters += 1 return images,output def convert(l):", "m1_=pool.map(max_pooling_,conv1) pool.close() pool.join() m1=[i[0] for i in m1_] pos1=[i[1]for i in m1_] u1=[i[2]for", "def load_images_from_folder(folder): images = [] for filename in os.listdir(folder): images.append(cv2.imread(os.path.join(folder,filename),0)) return images def", "os from conv import * import multiprocessing from multiprocessing import Pool from itertools", "return - np.log(y_hat[range(len(y_hat)), y]) def update_weights(features,output,weights,learning_rate): predicted=predict(features,weights) print(features.shape) print(weights.shape) print(predicted.shape) #print(np.linalg.norm(predicted-output)) weights=weights-learning_rate*(((output-predicted)@features.T).T) return", "e_Z / e_Z.sum(axis = 0) def predict(X,weights): return softmax(weights.T@X) def cross_entropy(y_hat, y): return", "i in m1_] pool=Pool(4) conv6=pool.map(partial(conv_layer,kernel=k6),m5) pool.close() pool.join() conv6[conv6<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv6) pool.close() pool.join() m6=[i[0]", "return (*l,) def data_preprocessing(data,reshape_dim): for i in range(0,len(data)): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1) return data def", "pool.close() pool.join() conv7[conv7<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv7) pool.close() pool.join() m7=[i[0] for i in m1_] pos7=[i[1]for", "from conv import * import multiprocessing from multiprocessing import Pool from itertools import", "train(folder,reshape_dim,learning_rate,epoch): data,output=load_data(folder) #data=[1,2,3,4,5,6,7,8,9,10,11,12,13] #print(output) #print(output[0].shape) #print(data[0].shape) #print(data[1]) data=parallel_2(data,reshape_dim) weights=softmax_regression(data,output,learning_rate,epoch) return weights def 
train_with_sklearn(folder,reshape_dim,x1,x2,x3):", "#print(features.shape) #print(output.shape) #print(weights) #print(type(weights)) predicted=predict(features,weights) g=(-(output-predicted)@features.T).T m=np.zeros(weights.shape) v=np.zeros(weights.shape) m=beta1*m+(1-beta1)*g v=beta2*v+(1-beta2)*(g*g) m_hat=m/(1-(beta1**(t+1))) v_hat=v/(1-(beta2**(t+1))) #print(m_hat,v_hat)", "import Pool from itertools import product from numba import njit from functools import", "pool.close() pool.join() conv8[conv8<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv1) pool.close() pool.join() m8=[i[0] for i in m1_] pos8=[i[1]for", "u7=[i[2]for i in m1_] r7=[i[3]for i in m1_] pool=Pool(4) conv8=pool.map(partial(conv_layer,kernel=k8),m7) pool.close() pool.join() conv8[conv8<=0]=0", "predicted=predict(data_hat,weights) print(np.linalg.norm(predicted-output_hat)) #for n in np.random.permutation(len(output)): weights=Adam(data_hat,output_hat,weights,learning_rate,i) #if np.linalg.norm(weights-pre_weights)<0.0001: # print(i) # break\"\"\"", "np.linalg.norm(weights-pre_weights)<0.0001: # print(i) # break\"\"\" return weights def softmax_regression_2(data,output,x1,x2,x3): output=np.asarray(output) output=output.reshape(output.shape[0],output.shape[1]).T output=output.reshape(-1) data=np.asarray(data)", "import numpy as np import cv2 import os from conv import * import", "in m1_] r6=[i[3]for i in m1_] pool=Pool(4) conv7=pool.map(partial(conv_layer,kernel=k7),m6) pool.close() pool.join() conv7[conv7<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv7)", "model=sklearn.linear_model.LogisticRegression(C=1e5,solver='lbfgs',multi_class='multinomial') model.fit(data,output) y1=model.predict(x1) y2=model.predict(x2) y3=model.predict(x3) #for i in range(epoch): # weights=update_weights(data,output,weights,learning_rate) return y1,y2,y3", "def prepare_2(data): data=ConvNet(cv2.resize(data/255,(256,256),interpolation=cv2.INTER_AREA)) data=data.reshape(data.size,1) return data def parallel(data,reshape_dim): process=[] for i in range(len(data)):", "m1_=pool.map(max_pooling_,conv4) pool.close() pool.join() m4=[i[0] for i in m1_] pos4=[i[1]for i in m1_] u4=[i[2]for", "in m1_] u8=[i[2]for i in m1_] r8=[i[3]for i in m1_] def train(folder,reshape_dim,learning_rate,epoch): data,output=load_data(folder)", "/ e_Z.sum(axis = 0) def predict(X,weights): return softmax(weights.T@X) def cross_entropy(y_hat, y): return -", "m6=[i[0] for i in m1_] pos6=[i[1]for i in m1_] u6=[i[2]for i in m1_]", "i in m1_] pos4=[i[1]for i in m1_] u4=[i[2]for i in m1_] r4=[i[3]for i", "0 for filename in os.listdir(folder): path=folder+\"\\\\\"+filename pictures = load_images_from_folder(path) for pics in pictures:", "m1_] pos6=[i[1]for i in m1_] u6=[i[2]for i in m1_] r6=[i[3]for i in m1_]", "softmax(Z): e_Z = np.exp(Z - np.max(Z, axis = 0, keepdims = True,initial=-np.inf)) return", "as np import cv2 import os from conv import * import multiprocessing from", "for filename in os.listdir(folder): images.append(cv2.imread(os.path.join(folder,filename),0)) return images def load_data(folder): images=[] n=len(os.listdir(folder)) #print(n) output=[]", "m1_] r7=[i[3]for i in m1_] pool=Pool(4) conv8=pool.map(partial(conv_layer,kernel=k8),m7) pool.close() pool.join() conv8[conv8<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv1) pool.close()", "in m1_] r4=[i[3]for i in m1_] pool=Pool(4) conv5=pool.map(partial(conv_layer,kernel=k5),m4) pool.close() pool.join() conv5[conv5<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv5)", "import sklearn from sklearn import 
linear_model def load_images_from_folder(folder): images = [] for filename", "iters = 0 for filename in os.listdir(folder): path=folder+\"\\\\\"+filename pictures = load_images_from_folder(path) for pics", "m7=[i[0] for i in m1_] pos7=[i[1]for i in m1_] u7=[i[2]for i in m1_]", "m1_] u5=[i[2]for i in m1_] r5=[i[3]for i in m1_] pool=Pool(4) conv6=pool.map(partial(conv_layer,kernel=k6),m5) pool.close() pool.join()", "pool=Pool(4) conv4=pool.map(partial(conv_layer,kernel=k4),m3) pool.close() pool.join() conv4[conv4<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv4) pool.close() pool.join() m4=[i[0] for i in", "in range(0,len(data)): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1) return data def prepare(data,reshape_dim,i): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA)) data[i]=data[i].reshape(data[i].size,1) def prepare_2(data): data=ConvNet(cv2.resize(data/255,(256,256),interpolation=cv2.INTER_AREA))", "pool=Pool(4) conv1=pool.map(partial(conv_layer,kernel=k1),data) pool.close() pool.join() conv1[conv1<=0]=0 pool=Pool(4) m1_=pool.map(max_pooling_,conv1) pool.close() pool.join() m1=[i[0] for i in", "model.fit(data,output) y1=model.predict(x1) y2=model.predict(x2) y3=model.predict(x3) #for i in range(epoch): # weights=update_weights(data,output,weights,learning_rate) return y1,y2,y3 def", "in m1_] u5=[i[2]for i in m1_] r5=[i[3]for i in m1_] pool=Pool(4) conv6=pool.map(partial(conv_layer,kernel=k6),m5) pool.close()", "return images,output def convert(l): return (*l,) def data_preprocessing(data,reshape_dim): for i in range(0,len(data)): data[i]=ConvNet(cv2.resize(data[i]/255,reshape_dim,interpolation=cv2.INTER_AREA))", "in range(epoch): # weights=update_weights(data,output,weights,learning_rate) return y1,y2,y3 def CNN(data,output,lr,epoch): k1=np.random.rand(3,3) k2=np.random.rand(3,3) k3=np.random.rand(3,3) k4=np.random.rand(3,3) k5=np.random.rand(3,3)", "update_weights(features,output,weights,learning_rate): predicted=predict(features,weights) print(features.shape) print(weights.shape) print(predicted.shape) #print(np.linalg.norm(predicted-output)) weights=weights-learning_rate*(((output-predicted)@features.T).T) return weights def Adam(features,output,weights,lr,t,beta1=0.9,beta2=0.999,epsilon=1e-08): #print(features.shape) #print(output.shape)", "m1_] r8=[i[3]for i in m1_] def train(folder,reshape_dim,learning_rate,epoch): data,output=load_data(folder) #data=[1,2,3,4,5,6,7,8,9,10,11,12,13] #print(output) #print(output[0].shape) #print(data[0].shape) #print(data[1])", "process.append(p) for x in process: x.start() for x in process: x.join() for i", "pos4=[i[1]for i in m1_] u4=[i[2]for i in m1_] r4=[i[3]for i in m1_] pool=Pool(4)", "import multiprocessing from multiprocessing import Pool from itertools import product from numba import" ]
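# --- Illustrative check (added; not part of the original script) ---
# A minimal sanity test of the numerically stable softmax defined above:
# subtracting the per-column maximum keeps the exponentials finite even for
# very large logits, and each column of probabilities sums to 1. The example
# logits below are made up for demonstration only.
if __name__ == '__main__':
    logits = np.array([[1000.0, -5.0],
                       [1001.0,  0.0],
                       [ 999.0,  5.0]])   # rows = classes, columns = samples
    probs = softmax(logits)
    print(probs)                          # finite values, no overflow warnings
    print(probs.sum(axis=0))              # -> [1. 1.]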
# Copyright (c) 2011-2013 <NAME>. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the COPYRIGHT file.
# agentSndModule
# Host agent send module
# send stats, hostJoin, etc to the controller

import socket
import sys
import logging
import cPickle as pickle
from uuid import getnode as get_mac
from cStringIO import StringIO

from agentUtil import LogUtil
from hone_message import *

ctrlCommPort = 8866

class HostAgentSndSocket:
    def __init__(self, controllerAddress = 'localhost', controllerPort = ctrlCommPort):
        try:
            self.hostSock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self.hostSock.connect((controllerAddress, controllerPort))
            message = HoneMessage()
            message.messageType = HoneMessageType_HostJoin
            message.hostId = str(get_mac())
            #message.hostId = str(random.randint(0, 1000000))
            self.sendMessage(message)
        except socket.error, msg:
            logging.error('connection to controller error: {0}'.format(msg))
            print 'connect error '
            print msg
            if self.hostSock:
                self.hostSock.close()
            self.hostSock = None
        except Exception:
            if self.hostSock:
                self.hostSock.close()
            self.hostSock = None
        if self.hostSock is None:
            logging.error('Connection to controller error in HostAgentSndSocket. Agent will stop.')
            print 'Connection to controller error in HostAgentSndSocket. Agent will stop.'
            sys.exit()

    def sendMessage(self, message):
        if self.hostSock:
            src = StringIO()
            pickle.dump(message, src, pickle.HIGHEST_PROTOCOL)
            data = src.getvalue() + '\r\n'
            src.close()
            self.hostSock.sendall(data)
            #debugLog('sndModule', 'send message. messageType:', \
            #    message.messageType, 'jobId', message.jobId, \
            #    'flowId:', message.flowId, 'sequence:', \
            #    message.sequence, 'content:', message.content)

    def closeSocket(self):
        self.hostSock.close()

    def recvMessage(self):
        return self.hostSock.recv(1024)

class HostAgentRelaySndSocket:
    def __init__(self, middleAddress, port):
        try:
            self.hostSock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self.hostSock.connect((middleAddress, port))
        except socket.error, msg:
            logging.warning('Relay connection to middle error: {0}'.format(msg))
            print 'connect error '
            print msg
            if self.hostSock:
                self.hostSock.close()
            self.hostSock = None
        except Exception:
            self.hostSock = None
        if self.hostSock is None:
            logging.warning('socket error in HostAgentRelaySndSocket')
            print 'socket error in HostAgentRelaySndSocket'

    def sendMessage(self, message):
        if self.hostSock:
            src = StringIO()
            pickle.dump(message, src, pickle.HIGHEST_PROTOCOL)
            data = src.getvalue() + '\r\n'
            src.close()
            self.hostSock.sendall(data)
            #debugLog('sndModule', 'send message. messageType:',\
            #    message.messageType, 'jobId', message.jobId,\
            #    'flowId:', message.flowId, 'sequence:',\
            #    message.sequence, 'content:', message.content)
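# A usage sketch, not part of the module above: open the relay socket and push one
# HoneMessage through it. The address and port values are placeholder assumptions;
# HoneMessage and HoneMessageType_HostJoin come from hone_message as imported above.
relaySock = HostAgentRelaySndSocket('192.0.2.10', 8866)
joinMsg = HoneMessage()
joinMsg.messageType = HoneMessageType_HostJoin
joinMsg.hostId = str(get_mac())
relaySock.sendMessage(joinMsg)
if relaySock.hostSock:
    relaySock.hostSock.close()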
TYRE_POSITION_FRONT_LEFT = 'FL'
TYRE_POSITION_FRONT_RIGHT = 'FR'
TYRE_POSITION_REAR_LEFT = 'RL'
TYRE_POSITION_REAR_RIGHT = 'RR'

TYRE_POSITION_FRONT_LEFT_LABEL = 'Front Left'
TYRE_POSITION_FRONT_RIGHT_LABEL = 'Front Right'
TYRE_POSITION_REAR_LEFT_LABEL = 'Rear Left'
TYRE_POSITION_REAR_RIGHT_LABEL = 'Rear Right'

TYRE_POSITIONS = (
    (TYRE_POSITION_FRONT_LEFT, TYRE_POSITION_FRONT_LEFT_LABEL),
    (TYRE_POSITION_FRONT_RIGHT, TYRE_POSITION_FRONT_RIGHT_LABEL),
    (TYRE_POSITION_REAR_LEFT, TYRE_POSITION_REAR_LEFT_LABEL),
    (TYRE_POSITION_REAR_RIGHT, TYRE_POSITION_REAR_RIGHT_LABEL),
)

SAMPLE_DATA_PATH = 'resources/data_measurements_finals.csv'
FIELD_TIMESTAMP_FORMAT = '%Y-%m-%d %H:%M:%S'
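# A small sketch, not part of the original constants file: the pairs in
# TYRE_POSITIONS read like (code, label) choices, and FIELD_TIMESTAMP_FORMAT is a
# strptime-style format string. Both are exercised below with made-up values.
from datetime import datetime

tyre_labels = dict(TYRE_POSITIONS)
print(tyre_labels[TYRE_POSITION_FRONT_LEFT])      # 'Front Left'
measured_at = datetime.strptime('2021-06-01 14:30:00', FIELD_TIMESTAMP_FORMAT)
print(measured_at.isoformat())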
<filename>other/time_travel.py<gh_stars>0
from datetime import datetime, timedelta

class TimeTravel:
    def __init__(self):
        self.now = datetime.now()

    def fuck_go_back_by(self, hours=0, minutes=0):
        return self.now - timedelta(hours=hours, minutes=minutes)

if __name__ == '__main__':
    tt = TimeTravel()
    time = tt.fuck_go_back_by(hours=1, minutes=10)
    print(time)
# use after installing the client to run the client
import sys
import multiprocessing

try:
    import pyOHOL
except ImportError as e:
    print("Client is not installed")
    raise e

def main():
    multiprocessing.freeze_support()
    pyOHOL.main()

if __name__ == "__main__":
    main()
<reponame>riverstation/project-all
import json

from Api.utils import *

def admin_required(func):
    def _wrapper(self, request, *args, **kwargs):
        if request.user.is_authenticated and hasattr(request.user, 'admin'):
            return func(self, request, *args, **kwargs)
        else:
            return not_authenticated()
    return _wrapper

def customer_required(func):
    def _wrapper(self, request, *args, **kwargs):
        if request.user.is_authenticated and hasattr(request.user, 'customer'):
            return func(self, request, *args, **kwargs)
        else:
            return not_authenticated()
    return _wrapper

def userinfo_required(func):
    def _wrapper(self, request, *args, **kwargs):
        if request.user.is_authenticated and hasattr(request.user, 'userinfo'):
            return func(self, request, *args, **kwargs)
        else:
            return not_authenticated()
    return _wrapper
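# A hypothetical usage sketch, not part of the module above: the decorators expect
# methods with a (self, request, ...) signature, e.g. class-based views. The
# AdminOrderList class and its body are invented purely for illustration.
class AdminOrderList(object):
    @admin_required
    def get(self, request, *args, **kwargs):
        return json.dumps({'orders': []})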
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create LastDeployedTime table

Revision ID: 53bee4c621a1
Revises: <PASSWORD>
Create Date: 2020-05-03 23:18:22.731457

"""

# revision identifiers, used by Alembic.
revision = '53bee4c621a1'
down_revision = 'c2<PASSWORD>'
branch_labels = None
depends_on = None

from alembic import op
import sqlalchemy as sa
from airflow.models import LastDeployedTime
from datetime import datetime

def upgrade():
    op.create_table(
        'last_deployed_time',
        sa.Column('last_deployed', sa.DateTime(), primary_key=True)
    )
    LastDeployedTime().set_last_deployed(datetime.utcnow())

def downgrade():
    op.drop_table("last_deployed_time")
# You may obtain a copy of the License at #", "to in writing, software # distributed under the License is distributed on an", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "implied. # See the License for the specific language governing permissions and #", "License. \"\"\"Create LastDeployedTime table Revision ID: 53bee4c621a1 Revises: <PASSWORD> Create Date: 2020-05-03 23:18:22.731457", "\"License\"); # you may not use this file except in compliance with the", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "import op import sqlalchemy as sa from airflow.models import LastDeployedTime from datetime import", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "required by applicable law or agreed to in writing, software # distributed under", "applicable law or agreed to in writing, software # distributed under the License", "from datetime import datetime def upgrade(): op.create_table( 'last_deployed_time', sa.Column('last_deployed', sa.DateTime(), primary_key=True) ) LastDeployedTime().set_last_deployed(datetime.utcnow())", "by Alembic. revision = '53bee4c621a1' down_revision = 'c2<PASSWORD>' branch_labels = None depends_on =", "or agreed to in writing, software # distributed under the License is distributed", "'53bee4c621a1' down_revision = 'c2<PASSWORD>' branch_labels = None depends_on = None from alembic import", "or implied. # See the License for the specific language governing permissions and", "depends_on = None from alembic import op import sqlalchemy as sa from airflow.models", "limitations under the License. \"\"\"Create LastDeployedTime table Revision ID: 53bee4c621a1 Revises: <PASSWORD> Create", "branch_labels = None depends_on = None from alembic import op import sqlalchemy as", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "OR CONDITIONS OF ANY KIND, either express or implied. # See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "'c2<PASSWORD>' branch_labels = None depends_on = None from alembic import op import sqlalchemy", "op import sqlalchemy as sa from airflow.models import LastDeployedTime from datetime import datetime", "# limitations under the License. \"\"\"Create LastDeployedTime table Revision ID: 53bee4c621a1 Revises: <PASSWORD>", "with the License. # You may obtain a copy of the License at", "governing permissions and # limitations under the License. \"\"\"Create LastDeployedTime table Revision ID:", "datetime import datetime def upgrade(): op.create_table( 'last_deployed_time', sa.Column('last_deployed', sa.DateTime(), primary_key=True) ) LastDeployedTime().set_last_deployed(datetime.utcnow()) def", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "under the License. \"\"\"Create LastDeployedTime table Revision ID: 53bee4c621a1 Revises: <PASSWORD> Create Date:", "in writing, software # distributed under the License is distributed on an \"AS", "the specific language governing permissions and # limitations under the License. \"\"\"Create LastDeployedTime", "under the Apache License, Version 2.0 (the \"License\"); # you may not use" ]
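The list above shingles an Alembic migration from an Airflow-based project that creates a last_deployed_time table and seeds it with the current time. Stitching the overlapping fragments back together yields roughly the module below; the Apache-2.0 license header is abbreviated, and the <PASSWORD> tokens are redactions already present in the source, not real revision identifiers.

# Reassembled from the overlapping fragments above (license header abbreviated).
"""Create LastDeployedTime table

Revision ID: 53bee4c621a1
Revises: <PASSWORD>
Create Date: 2020-05-03 23:18:22.731457

"""
# revision identifiers, used by Alembic.
revision = '53bee4c621a1'
down_revision = 'c2<PASSWORD>'
branch_labels = None
depends_on = None

from alembic import op
import sqlalchemy as sa
from airflow.models import LastDeployedTime
from datetime import datetime


def upgrade():
    # Create the single-column table and record the current deploy time.
    op.create_table(
        'last_deployed_time',
        sa.Column('last_deployed', sa.DateTime(), primary_key=True)
    )
    LastDeployedTime().set_last_deployed(datetime.utcnow())


def downgrade():
    op.drop_table("last_deployed_time")
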
[ "from django.contrib import admin from.models import Ticket,Customeuser # Register your models here. admin.site.register(Ticket)", "django.contrib import admin from.models import Ticket,Customeuser # Register your models here. admin.site.register(Ticket) admin.site.register(Customeuser)" ]
[ "sys from scrapy import cmdline if __name__ == '__main__': sys.path.append(os.path.dirname(os.path.abspath(__file__))) cmdline.execute(['scrapy', 'crawl', 'alibaba'])", "# coding:utf-8 import os import sys from scrapy import cmdline if __name__ ==", "import os import sys from scrapy import cmdline if __name__ == '__main__': sys.path.append(os.path.dirname(os.path.abspath(__file__)))", "<reponame>dgdell/AlibabaInternation # coding:utf-8 import os import sys from scrapy import cmdline if __name__", "import sys from scrapy import cmdline if __name__ == '__main__': sys.path.append(os.path.dirname(os.path.abspath(__file__))) cmdline.execute(['scrapy', 'crawl',", "os import sys from scrapy import cmdline if __name__ == '__main__': sys.path.append(os.path.dirname(os.path.abspath(__file__))) cmdline.execute(['scrapy',", "coding:utf-8 import os import sys from scrapy import cmdline if __name__ == '__main__':" ]
[ "writing, software # distributed under the License is distributed on an \"AS IS\"", "\\ np.linalg.norm(sample_test, axis=1).reshape((len(sample_test),1)) # Pick training and test size number of samples for", "minmax_scale.transform(sample_train) sample_test = minmax_scale.transform(sample_test) # Normalize rows. if normalize: sample_train = sample_train /", "KIND, either express or implied. # See the License for the specific language", "training and test size number of samples for each class label training_input =", "Unless required by applicable law or agreed to in writing, software # distributed", "language governing permissions and # limitations under the License. \"\"\" Common dataset preprocessing", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "# pylint: disable=no-member sample_train, sample_test, label_train, label_test = \\ train_test_split(data.data, data.target, test_size=test_size*num_classes, random_state=random_seed,", "dataset preprocessing routine \"\"\" import numpy as np from sklearn.model_selection import train_test_split from", "PCA(n_components=features).fit(sample_train) sample_train = pca.transform(sample_train) sample_test = pca.transform(sample_test) # Scale to the range (0,", "gaussian around 0 with unit variance std_scale = StandardScaler().fit(sample_train) sample_train = std_scale.transform(sample_train) sample_test", "to the range (0, +1) samples = np.append(sample_train, sample_test, axis=0) minmax_scale = MinMaxScaler((0,", "# Pick training and test size number of samples for each class label", "= minmax_scale.transform(sample_test) # Normalize rows. if normalize: sample_train = sample_train / \\ np.linalg.norm(sample_train,", "StandardScaler().fit(sample_train) sample_train = std_scale.transform(sample_train) sample_test = std_scale.transform(sample_test) # Reduce the number of features", "a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable", "License. \"\"\" Common dataset preprocessing routine \"\"\" import numpy as np from sklearn.model_selection", "normalize=True): \"\"\" Common dataset preprocessing routine \"\"\" # pylint: disable=no-member sample_train, sample_test, label_train,", "Common dataset preprocessing routine \"\"\" # pylint: disable=no-member sample_train, sample_test, label_train, label_test =", "class label training_input = {key: (sample_train[label_train == key, :])[:training_size] for key in class_labels}", "project. # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "normalize: sample_train = sample_train / \\ np.linalg.norm(sample_train, axis=1).reshape((len(sample_train),1)) sample_test = sample_test / \\", "law or agreed to in writing, software # distributed under the License is", "(sample_train[label_train == key, :])[:training_size] for key in class_labels} test_input = {key: (sample_test[label_test ==", "the License for the specific language governing permissions and # limitations under the", "# Normalize rows. if normalize: sample_train = sample_train / \\ np.linalg.norm(sample_train, axis=1).reshape((len(sample_train),1)) sample_test", "compliance with the License. 
# You may obtain a copy of the License", "test_size=test_size*num_classes, random_state=random_seed, shuffle=True, stratify=data.target) # Standardize for gaussian around 0 with unit variance", "test_size:int, features:int, max_features:int, data, class_labels:list, num_classes:int, random_seed=42, normalize=True): \"\"\" Common dataset preprocessing routine", "under the License. \"\"\" Common dataset preprocessing routine \"\"\" import numpy as np", "Common dataset preprocessing routine \"\"\" import numpy as np from sklearn.model_selection import train_test_split", "= std_scale.transform(sample_test) # Reduce the number of features if features < max_features: pca", "preprocessing(training_size:int, test_size:int, features:int, max_features:int, data, class_labels:list, num_classes:int, random_seed=42, normalize=True): \"\"\" Common dataset preprocessing", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "from sklearn.decomposition import PCA def preprocessing(training_size:int, test_size:int, features:int, max_features:int, data, class_labels:list, num_classes:int, random_seed=42,", "\"\"\" # pylint: disable=no-member sample_train, sample_test, label_train, label_test = \\ train_test_split(data.data, data.target, test_size=test_size*num_classes,", "this file except in compliance with the License. # You may obtain a", "data.target, test_size=test_size*num_classes, random_state=random_seed, shuffle=True, stratify=data.target) # Standardize for gaussian around 0 with unit", "std_scale.transform(sample_test) # Reduce the number of features if features < max_features: pca =", "and test size number of samples for each class label training_input = {key:", "stratify=data.target) # Standardize for gaussian around 0 with unit variance std_scale = StandardScaler().fit(sample_train)", "pylint: disable=no-member sample_train, sample_test, label_train, label_test = \\ train_test_split(data.data, data.target, test_size=test_size*num_classes, random_state=random_seed, shuffle=True,", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed", "you may not use this file except in compliance with the License. #", "sample_test, axis=0) minmax_scale = MinMaxScaler((0, 1)).fit(samples) sample_train = minmax_scale.transform(sample_train) sample_test = minmax_scale.transform(sample_test) #", "label training_input = {key: (sample_train[label_train == key, :])[:training_size] for key in class_labels} test_input", "minmax_scale = MinMaxScaler((0, 1)).fit(samples) sample_train = minmax_scale.transform(sample_train) sample_test = minmax_scale.transform(sample_test) # Normalize rows.", "http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software", "if normalize: sample_train = sample_train / \\ np.linalg.norm(sample_train, axis=1).reshape((len(sample_train),1)) sample_test = sample_test /", "governing permissions and # limitations under the License. \"\"\" Common dataset preprocessing routine", "numpy as np from sklearn.model_selection import train_test_split from sklearn.preprocessing import StandardScaler, MinMaxScaler from", "random_state=random_seed, shuffle=True, stratify=data.target) # Standardize for gaussian around 0 with unit variance std_scale", "# http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing,", "ANY KIND, either express or implied. 
# See the License for the specific", "# Standardize for gaussian around 0 with unit variance std_scale = StandardScaler().fit(sample_train) sample_train", "std_scale = StandardScaler().fit(sample_train) sample_train = std_scale.transform(sample_train) sample_test = std_scale.transform(sample_test) # Reduce the number", "size number of samples for each class label training_input = {key: (sample_train[label_train ==", "= std_scale.transform(sample_train) sample_test = std_scale.transform(sample_test) # Reduce the number of features if features", "preprocessing routine \"\"\" # pylint: disable=no-member sample_train, sample_test, label_train, label_test = \\ train_test_split(data.data,", "\\ train_test_split(data.data, data.target, test_size=test_size*num_classes, random_state=random_seed, shuffle=True, stratify=data.target) # Standardize for gaussian around 0", "Reduce the number of features if features < max_features: pca = PCA(n_components=features).fit(sample_train) sample_train", "sample_train, sample_test, label_train, label_test = \\ train_test_split(data.data, data.target, test_size=test_size*num_classes, random_state=random_seed, shuffle=True, stratify=data.target) #", "key in class_labels} test_input = {key: (sample_test[label_test == key, :])[:test_size] for key in", "from sklearn.model_selection import train_test_split from sklearn.preprocessing import StandardScaler, MinMaxScaler from sklearn.decomposition import PCA", "in compliance with the License. # You may obtain a copy of the", "\"\"\" Common dataset preprocessing routine \"\"\" import numpy as np from sklearn.model_selection import", "disable=no-member sample_train, sample_test, label_train, label_test = \\ train_test_split(data.data, data.target, test_size=test_size*num_classes, random_state=random_seed, shuffle=True, stratify=data.target)", "{key: (sample_train[label_train == key, :])[:training_size] for key in class_labels} test_input = {key: (sample_test[label_test", "(sample_test[label_test == key, :])[:test_size] for key in class_labels} return sample_train, training_input, test_input, class_labels", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #", "= MinMaxScaler((0, 1)).fit(samples) sample_train = minmax_scale.transform(sample_train) sample_test = minmax_scale.transform(sample_test) # Normalize rows. if", "use this file except in compliance with the License. # You may obtain", "for the specific language governing permissions and # limitations under the License. \"\"\"", "variance std_scale = StandardScaler().fit(sample_train) sample_train = std_scale.transform(sample_train) sample_test = std_scale.transform(sample_test) # Reduce the", "features < max_features: pca = PCA(n_components=features).fit(sample_train) sample_train = pca.transform(sample_train) sample_test = pca.transform(sample_test) #", "not use this file except in compliance with the License. # You may", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See", "limitations under the License. 
\"\"\" Common dataset preprocessing routine \"\"\" import numpy as", "axis=0) minmax_scale = MinMaxScaler((0, 1)).fit(samples) sample_train = minmax_scale.transform(sample_train) sample_test = minmax_scale.transform(sample_test) # Normalize", "features if features < max_features: pca = PCA(n_components=features).fit(sample_train) sample_train = pca.transform(sample_train) sample_test =", "Scale to the range (0, +1) samples = np.append(sample_train, sample_test, axis=0) minmax_scale =", "sample_test = sample_test / \\ np.linalg.norm(sample_test, axis=1).reshape((len(sample_test),1)) # Pick training and test size", "See the License for the specific language governing permissions and # limitations under", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "License, Version 2.0 (the \"License\"); # you may not use this file except", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "preprocessing routine \"\"\" import numpy as np from sklearn.model_selection import train_test_split from sklearn.preprocessing", "as np from sklearn.model_selection import train_test_split from sklearn.preprocessing import StandardScaler, MinMaxScaler from sklearn.decomposition", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "if features < max_features: pca = PCA(n_components=features).fit(sample_train) sample_train = pca.transform(sample_train) sample_test = pca.transform(sample_test)", ":])[:training_size] for key in class_labels} test_input = {key: (sample_test[label_test == key, :])[:test_size] for", "max_features:int, data, class_labels:list, num_classes:int, random_seed=42, normalize=True): \"\"\" Common dataset preprocessing routine \"\"\" #", "routine \"\"\" # pylint: disable=no-member sample_train, sample_test, label_train, label_test = \\ train_test_split(data.data, data.target,", "Pick training and test size number of samples for each class label training_input", "OF ANY KIND, either express or implied. 
# See the License for the", "for gaussian around 0 with unit variance std_scale = StandardScaler().fit(sample_train) sample_train = std_scale.transform(sample_train)", "sample_test, label_train, label_test = \\ train_test_split(data.data, data.target, test_size=test_size*num_classes, random_state=random_seed, shuffle=True, stratify=data.target) # Standardize", "of features if features < max_features: pca = PCA(n_components=features).fit(sample_train) sample_train = pca.transform(sample_train) sample_test", "= pca.transform(sample_train) sample_test = pca.transform(sample_test) # Scale to the range (0, +1) samples", "= {key: (sample_train[label_train == key, :])[:training_size] for key in class_labels} test_input = {key:", "2.0 (the \"License\"); # you may not use this file except in compliance", "np.append(sample_train, sample_test, axis=0) minmax_scale = MinMaxScaler((0, 1)).fit(samples) sample_train = minmax_scale.transform(sample_train) sample_test = minmax_scale.transform(sample_test)", "obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by", "around 0 with unit variance std_scale = StandardScaler().fit(sample_train) sample_train = std_scale.transform(sample_train) sample_test =", "# Scale to the range (0, +1) samples = np.append(sample_train, sample_test, axis=0) minmax_scale", "copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law", "# you may not use this file except in compliance with the License.", "< max_features: pca = PCA(n_components=features).fit(sample_train) sample_train = pca.transform(sample_train) sample_test = pca.transform(sample_test) # Scale", "max_features: pca = PCA(n_components=features).fit(sample_train) sample_train = pca.transform(sample_train) sample_test = pca.transform(sample_test) # Scale to", "permissions and # limitations under the License. \"\"\" Common dataset preprocessing routine \"\"\"", "sample_test = minmax_scale.transform(sample_test) # Normalize rows. if normalize: sample_train = sample_train / \\", "agreed to in writing, software # distributed under the License is distributed on", "specific language governing permissions and # limitations under the License. \"\"\" Common dataset", "\\ np.linalg.norm(sample_train, axis=1).reshape((len(sample_train),1)) sample_test = sample_test / \\ np.linalg.norm(sample_test, axis=1).reshape((len(sample_test),1)) # Pick training", "PCA def preprocessing(training_size:int, test_size:int, features:int, max_features:int, data, class_labels:list, num_classes:int, random_seed=42, normalize=True): \"\"\" Common", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the", "class_labels} test_input = {key: (sample_test[label_test == key, :])[:test_size] for key in class_labels} return", "at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in", "(the \"License\"); # you may not use this file except in compliance with", "def preprocessing(training_size:int, test_size:int, features:int, max_features:int, data, class_labels:list, num_classes:int, random_seed=42, normalize=True): \"\"\" Common dataset", "License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to", "data, class_labels:list, num_classes:int, random_seed=42, normalize=True): \"\"\" Common dataset preprocessing routine \"\"\" # pylint:", "random_seed=42, normalize=True): \"\"\" Common dataset preprocessing routine \"\"\" # pylint: disable=no-member sample_train, sample_test,", "rows. if normalize: sample_train = sample_train / \\ np.linalg.norm(sample_train, axis=1).reshape((len(sample_train),1)) sample_test = sample_test", "== key, :])[:training_size] for key in class_labels} test_input = {key: (sample_test[label_test == key,", "{key: (sample_test[label_test == key, :])[:test_size] for key in class_labels} return sample_train, training_input, test_input,", "express or implied. # See the License for the specific language governing permissions", "the range (0, +1) samples = np.append(sample_train, sample_test, axis=0) minmax_scale = MinMaxScaler((0, 1)).fit(samples)", "MinMaxScaler((0, 1)).fit(samples) sample_train = minmax_scale.transform(sample_train) sample_test = minmax_scale.transform(sample_test) # Normalize rows. if normalize:", "Version 2.0 (the \"License\"); # you may not use this file except in", "# Unless required by applicable law or agreed to in writing, software #", "2021 qclib project. # Licensed under the Apache License, Version 2.0 (the \"License\");", "except in compliance with the License. # You may obtain a copy of", "axis=1).reshape((len(sample_train),1)) sample_test = sample_test / \\ np.linalg.norm(sample_test, axis=1).reshape((len(sample_test),1)) # Pick training and test", "import numpy as np from sklearn.model_selection import train_test_split from sklearn.preprocessing import StandardScaler, MinMaxScaler", "samples for each class label training_input = {key: (sample_train[label_train == key, :])[:training_size] for", "by applicable law or agreed to in writing, software # distributed under the", "1)).fit(samples) sample_train = minmax_scale.transform(sample_train) sample_test = minmax_scale.transform(sample_test) # Normalize rows. if normalize: sample_train", "# limitations under the License. \"\"\" Common dataset preprocessing routine \"\"\" import numpy", "sample_train = std_scale.transform(sample_train) sample_test = std_scale.transform(sample_test) # Reduce the number of features if", "MinMaxScaler from sklearn.decomposition import PCA def preprocessing(training_size:int, test_size:int, features:int, max_features:int, data, class_labels:list, num_classes:int,", "pca.transform(sample_test) # Scale to the range (0, +1) samples = np.append(sample_train, sample_test, axis=0)", "label_test = \\ train_test_split(data.data, data.target, test_size=test_size*num_classes, random_state=random_seed, shuffle=True, stratify=data.target) # Standardize for gaussian", "either express or implied. 
# See the License for the specific language governing", "in class_labels} test_input = {key: (sample_test[label_test == key, :])[:test_size] for key in class_labels}", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "for each class label training_input = {key: (sample_train[label_train == key, :])[:training_size] for key", "may not use this file except in compliance with the License. # You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "# You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 #", "sklearn.preprocessing import StandardScaler, MinMaxScaler from sklearn.decomposition import PCA def preprocessing(training_size:int, test_size:int, features:int, max_features:int,", "label_train, label_test = \\ train_test_split(data.data, data.target, test_size=test_size*num_classes, random_state=random_seed, shuffle=True, stratify=data.target) # Standardize for", "Standardize for gaussian around 0 with unit variance std_scale = StandardScaler().fit(sample_train) sample_train =", "pca = PCA(n_components=features).fit(sample_train) sample_train = pca.transform(sample_train) sample_test = pca.transform(sample_test) # Scale to the", "class_labels:list, num_classes:int, random_seed=42, normalize=True): \"\"\" Common dataset preprocessing routine \"\"\" # pylint: disable=no-member", "may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "dataset preprocessing routine \"\"\" # pylint: disable=no-member sample_train, sample_test, label_train, label_test = \\", "samples = np.append(sample_train, sample_test, axis=0) minmax_scale = MinMaxScaler((0, 1)).fit(samples) sample_train = minmax_scale.transform(sample_train) sample_test", "num_classes:int, random_seed=42, normalize=True): \"\"\" Common dataset preprocessing routine \"\"\" # pylint: disable=no-member sample_train,", "(0, +1) samples = np.append(sample_train, sample_test, axis=0) minmax_scale = MinMaxScaler((0, 1)).fit(samples) sample_train =", "number of samples for each class label training_input = {key: (sample_train[label_train == key,", "file except in compliance with the License. # You may obtain a copy", "Copyright 2021 qclib project. # Licensed under the Apache License, Version 2.0 (the", "License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0", "import PCA def preprocessing(training_size:int, test_size:int, features:int, max_features:int, data, class_labels:list, num_classes:int, random_seed=42, normalize=True): \"\"\"", "the specific language governing permissions and # limitations under the License. 
\"\"\" Common", "test size number of samples for each class label training_input = {key: (sample_train[label_train", "sample_train = sample_train / \\ np.linalg.norm(sample_train, axis=1).reshape((len(sample_train),1)) sample_test = sample_test / \\ np.linalg.norm(sample_test,", "features:int, max_features:int, data, class_labels:list, num_classes:int, random_seed=42, normalize=True): \"\"\" Common dataset preprocessing routine \"\"\"", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "unit variance std_scale = StandardScaler().fit(sample_train) sample_train = std_scale.transform(sample_train) sample_test = std_scale.transform(sample_test) # Reduce", "= sample_train / \\ np.linalg.norm(sample_train, axis=1).reshape((len(sample_train),1)) sample_test = sample_test / \\ np.linalg.norm(sample_test, axis=1).reshape((len(sample_test),1))", "License for the specific language governing permissions and # limitations under the License.", "sklearn.decomposition import PCA def preprocessing(training_size:int, test_size:int, features:int, max_features:int, data, class_labels:list, num_classes:int, random_seed=42, normalize=True):", "= minmax_scale.transform(sample_train) sample_test = minmax_scale.transform(sample_test) # Normalize rows. if normalize: sample_train = sample_train", "np.linalg.norm(sample_train, axis=1).reshape((len(sample_train),1)) sample_test = sample_test / \\ np.linalg.norm(sample_test, axis=1).reshape((len(sample_test),1)) # Pick training and", "sample_test / \\ np.linalg.norm(sample_test, axis=1).reshape((len(sample_test),1)) # Pick training and test size number of", "# Reduce the number of features if features < max_features: pca = PCA(n_components=features).fit(sample_train)", "the License. # You may obtain a copy of the License at #", "train_test_split(data.data, data.target, test_size=test_size*num_classes, random_state=random_seed, shuffle=True, stratify=data.target) # Standardize for gaussian around 0 with", "np from sklearn.model_selection import train_test_split from sklearn.preprocessing import StandardScaler, MinMaxScaler from sklearn.decomposition import", "/ \\ np.linalg.norm(sample_train, axis=1).reshape((len(sample_train),1)) sample_test = sample_test / \\ np.linalg.norm(sample_test, axis=1).reshape((len(sample_test),1)) # Pick", "of samples for each class label training_input = {key: (sample_train[label_train == key, :])[:training_size]", "\"\"\" Common dataset preprocessing routine \"\"\" # pylint: disable=no-member sample_train, sample_test, label_train, label_test", "to in writing, software # distributed under the License is distributed on an", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "number of features if features < max_features: pca = PCA(n_components=features).fit(sample_train) sample_train = pca.transform(sample_train)", "from sklearn.preprocessing import StandardScaler, MinMaxScaler from sklearn.decomposition import PCA def preprocessing(training_size:int, test_size:int, features:int,", "shuffle=True, stratify=data.target) # Standardize for gaussian around 0 with unit variance std_scale =", "minmax_scale.transform(sample_test) # Normalize rows. if normalize: sample_train = sample_train / \\ np.linalg.norm(sample_train, axis=1).reshape((len(sample_train),1))", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "axis=1).reshape((len(sample_test),1)) # Pick training and test size number of samples for each class", "implied. 
# See the License for the specific language governing permissions and #", "= {key: (sample_test[label_test == key, :])[:test_size] for key in class_labels} return sample_train, training_input,", "\"License\"); # you may not use this file except in compliance with the", "Normalize rows. if normalize: sample_train = sample_train / \\ np.linalg.norm(sample_train, axis=1).reshape((len(sample_train),1)) sample_test =", "# Copyright 2021 qclib project. # Licensed under the Apache License, Version 2.0", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "= PCA(n_components=features).fit(sample_train) sample_train = pca.transform(sample_train) sample_test = pca.transform(sample_test) # Scale to the range", "required by applicable law or agreed to in writing, software # distributed under", "range (0, +1) samples = np.append(sample_train, sample_test, axis=0) minmax_scale = MinMaxScaler((0, 1)).fit(samples) sample_train", "sample_test = std_scale.transform(sample_test) # Reduce the number of features if features < max_features:", "applicable law or agreed to in writing, software # distributed under the License", "You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless", "routine \"\"\" import numpy as np from sklearn.model_selection import train_test_split from sklearn.preprocessing import", "import train_test_split from sklearn.preprocessing import StandardScaler, MinMaxScaler from sklearn.decomposition import PCA def preprocessing(training_size:int,", "StandardScaler, MinMaxScaler from sklearn.decomposition import PCA def preprocessing(training_size:int, test_size:int, features:int, max_features:int, data, class_labels:list,", "training_input = {key: (sample_train[label_train == key, :])[:training_size] for key in class_labels} test_input =", "np.linalg.norm(sample_test, axis=1).reshape((len(sample_test),1)) # Pick training and test size number of samples for each", "/ \\ np.linalg.norm(sample_test, axis=1).reshape((len(sample_test),1)) # Pick training and test size number of samples", "key, :])[:training_size] for key in class_labels} test_input = {key: (sample_test[label_test == key, :])[:test_size]", "+1) samples = np.append(sample_train, sample_test, axis=0) minmax_scale = MinMaxScaler((0, 1)).fit(samples) sample_train = minmax_scale.transform(sample_train)", "train_test_split from sklearn.preprocessing import StandardScaler, MinMaxScaler from sklearn.decomposition import PCA def preprocessing(training_size:int, test_size:int,", "and # limitations under the License. \"\"\" Common dataset preprocessing routine \"\"\" import", "= \\ train_test_split(data.data, data.target, test_size=test_size*num_classes, random_state=random_seed, shuffle=True, stratify=data.target) # Standardize for gaussian around", "sample_train = minmax_scale.transform(sample_train) sample_test = minmax_scale.transform(sample_test) # Normalize rows. 
if normalize: sample_train =", "sample_test = pca.transform(sample_test) # Scale to the range (0, +1) samples = np.append(sample_train,", "sample_train / \\ np.linalg.norm(sample_train, axis=1).reshape((len(sample_train),1)) sample_test = sample_test / \\ np.linalg.norm(sample_test, axis=1).reshape((len(sample_test),1)) #", "or agreed to in writing, software # distributed under the License is distributed", "= sample_test / \\ np.linalg.norm(sample_test, axis=1).reshape((len(sample_test),1)) # Pick training and test size number", "for key in class_labels} test_input = {key: (sample_test[label_test == key, :])[:test_size] for key", "import StandardScaler, MinMaxScaler from sklearn.decomposition import PCA def preprocessing(training_size:int, test_size:int, features:int, max_features:int, data,", "0 with unit variance std_scale = StandardScaler().fit(sample_train) sample_train = std_scale.transform(sample_train) sample_test = std_scale.transform(sample_test)", "= StandardScaler().fit(sample_train) sample_train = std_scale.transform(sample_train) sample_test = std_scale.transform(sample_test) # Reduce the number of", "= pca.transform(sample_test) # Scale to the range (0, +1) samples = np.append(sample_train, sample_test,", "or implied. # See the License for the specific language governing permissions and", "sample_train = pca.transform(sample_train) sample_test = pca.transform(sample_test) # Scale to the range (0, +1)", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", "with unit variance std_scale = StandardScaler().fit(sample_train) sample_train = std_scale.transform(sample_train) sample_test = std_scale.transform(sample_test) #", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "pca.transform(sample_train) sample_test = pca.transform(sample_test) # Scale to the range (0, +1) samples =", "OR CONDITIONS OF ANY KIND, either express or implied. # See the License", "each class label training_input = {key: (sample_train[label_train == key, :])[:training_size] for key in", "<reponame>carstenblank/qclib # Copyright 2021 qclib project. # Licensed under the Apache License, Version", "sklearn.model_selection import train_test_split from sklearn.preprocessing import StandardScaler, MinMaxScaler from sklearn.decomposition import PCA def", "with the License. # You may obtain a copy of the License at", "qclib project. # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "in writing, software # distributed under the License is distributed on an \"AS", "the number of features if features < max_features: pca = PCA(n_components=features).fit(sample_train) sample_train =", "\"\"\" import numpy as np from sklearn.model_selection import train_test_split from sklearn.preprocessing import StandardScaler,", "test_input = {key: (sample_test[label_test == key, :])[:test_size] for key in class_labels} return sample_train,", "std_scale.transform(sample_train) sample_test = std_scale.transform(sample_test) # Reduce the number of features if features <", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "the License. \"\"\" Common dataset preprocessing routine \"\"\" import numpy as np from", "= np.append(sample_train, sample_test, axis=0) minmax_scale = MinMaxScaler((0, 1)).fit(samples) sample_train = minmax_scale.transform(sample_train) sample_test =" ]
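The long list above shingles qclib's common dataset preprocessing helper: a stratified train/test split, standardization, optional PCA down to the requested number of features, min-max scaling to (0, 1), optional row normalization, and finally per-class selection of the requested number of training and test samples. Stitching the fragments together gives approximately the function below (the Apache-2.0 license header is omitted).

# Reassembled from the fragments above (license header omitted).
"""Common dataset preprocessing routine"""
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler, MinMaxScaler
from sklearn.decomposition import PCA


def preprocessing(training_size: int, test_size: int, features: int, max_features: int,
                  data, class_labels: list, num_classes: int, random_seed=42, normalize=True):
    """ Common dataset preprocessing routine """
    # pylint: disable=no-member
    sample_train, sample_test, label_train, label_test = \
        train_test_split(data.data, data.target, test_size=test_size * num_classes,
                         random_state=random_seed, shuffle=True, stratify=data.target)
    # Standardize for gaussian around 0 with unit variance
    std_scale = StandardScaler().fit(sample_train)
    sample_train = std_scale.transform(sample_train)
    sample_test = std_scale.transform(sample_test)
    # Reduce the number of features
    if features < max_features:
        pca = PCA(n_components=features).fit(sample_train)
        sample_train = pca.transform(sample_train)
        sample_test = pca.transform(sample_test)
    # Scale to the range (0, +1)
    samples = np.append(sample_train, sample_test, axis=0)
    minmax_scale = MinMaxScaler((0, 1)).fit(samples)
    sample_train = minmax_scale.transform(sample_train)
    sample_test = minmax_scale.transform(sample_test)
    # Normalize rows.
    if normalize:
        sample_train = sample_train / \
            np.linalg.norm(sample_train, axis=1).reshape((len(sample_train), 1))
        sample_test = sample_test / \
            np.linalg.norm(sample_test, axis=1).reshape((len(sample_test), 1))
    # Pick training and test size number of samples for each class label
    training_input = {key: (sample_train[label_train == key, :])[:training_size]
                      for key in class_labels}
    test_input = {key: (sample_test[label_test == key, :])[:test_size]
                  for key in class_labels}
    return sample_train, training_input, test_input, class_labels
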
[ "was 0.000000'), ]) @mock.patch('timy.output') @mock.patch('time.process_time') def test_timer_include_sleeptime_no(p_process_time, p_output): timy_config.tracking = True @timer(include_sleeptime=False) def", "test_timer_with_loops(p_perf_counter, p_output): timy_config.tracking = True LOOPS = 4 @timer(loops=LOOPS) def func(): pass p_perf_counter.return_value", "was 0.000000'), ]) @mock.patch('timy.output') @mock.patch('time.perf_counter') def test_timer_with_loops(p_perf_counter, p_output): timy_config.tracking = True LOOPS =", "in 0.000000'), mock.call( timy_config.DEFAULT_IDENT, 'best time was 0.000000'), ]) @mock.patch('timy.output') @mock.patch('time.process_time') def test_timer_include_sleeptime_no(p_process_time,", "def test_timer_include_sleeptime(p_perf_counter, p_output): timy_config.tracking = True @timer() def func(): pass p_perf_counter.return_value = 1", "0.000000'), mock.call( timy_config.DEFAULT_IDENT, 'best time was 0.000000'), ]) @mock.patch('timy.output') @mock.patch('time.process_time') def test_timer_include_sleeptime_no(p_process_time, p_output):", "p_output.assert_not_called() @mock.patch('timy.output') @mock.patch('time.perf_counter') def test_timer_include_sleeptime(p_perf_counter, p_output): timy_config.tracking = True @timer() def func(): pass", "'executed (func) for 1 time in 0.000000'), mock.call( timy_config.DEFAULT_IDENT, 'best time was 0.000000'),", "mock.call( timy_config.DEFAULT_IDENT, 'best time was 0.000000'), ]) @mock.patch('timy.output') @mock.patch('time.process_time') def test_timer_include_sleeptime_no(p_process_time, p_output): timy_config.tracking", "False @timer() def func(): pass func() p_output.assert_not_called() @mock.patch('timy.output') @mock.patch('time.perf_counter') def test_timer_include_sleeptime(p_perf_counter, p_output): timy_config.tracking", "def func(): pass p_process_time.return_value = 1 func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed (func) for", "time in 0.000000'), mock.call( timy_config.DEFAULT_IDENT, 'best time was 0.000000'), ]) @mock.patch('timy.output') @mock.patch('time.perf_counter') def", "timy_config.DEFAULT_IDENT, 'best time was 0.000000'), ]) @mock.patch('timy.output') @mock.patch('time.perf_counter') def test_timer_with_loops(p_perf_counter, p_output): timy_config.tracking =", "(func) for 1 time in 0.000000'), mock.call( timy_config.DEFAULT_IDENT, 'best time was 0.000000'), ])", "def test_timer_include_sleeptime_no(p_process_time, p_output): timy_config.tracking = True @timer(include_sleeptime=False) def func(): pass p_process_time.return_value = 1", "@timer() def func(): pass func() p_output.assert_not_called() @mock.patch('timy.output') @mock.patch('time.perf_counter') def test_timer_include_sleeptime(p_perf_counter, p_output): timy_config.tracking =", "1 time in 0.000000'), mock.call( timy_config.DEFAULT_IDENT, 'best time was 0.000000'), ]) @mock.patch('timy.output') @mock.patch('time.perf_counter')", "test_timer_no_tracking(p_output): timy_config.tracking = False @timer() def func(): pass func() p_output.assert_not_called() @mock.patch('timy.output') @mock.patch('time.perf_counter') def", "@mock.patch('timy.output') @mock.patch('time.perf_counter') def test_timer_include_sleeptime(p_perf_counter, p_output): timy_config.tracking = True @timer() def func(): pass p_perf_counter.return_value", "= True @timer(include_sleeptime=False) def func(): pass p_process_time.return_value = 1 func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT,", 
"@mock.patch('time.perf_counter') def test_timer_with_loops(p_perf_counter, p_output): timy_config.tracking = True LOOPS = 4 @timer(loops=LOOPS) def func():", "@mock.patch('timy.output') @mock.patch('time.process_time') def test_timer_include_sleeptime_no(p_process_time, p_output): timy_config.tracking = True @timer(include_sleeptime=False) def func(): pass p_process_time.return_value", "mock.call( timy_config.DEFAULT_IDENT, 'executed (func) for 1 time in 0.000000'), mock.call( timy_config.DEFAULT_IDENT, 'best time", "import timer from timy.settings import timy_config @mock.patch('timy.output') def test_timer_no_tracking(p_output): timy_config.tracking = False @timer()", "from unittest import mock from timy import timer from timy.settings import timy_config @mock.patch('timy.output')", "(func) for {} times in 0.000000'.format(LOOPS)), mock.call( timy_config.DEFAULT_IDENT, 'best time was 0.000000'), ])", "p_output): timy_config.tracking = True LOOPS = 4 @timer(loops=LOOPS) def func(): pass p_perf_counter.return_value =", "0.000000'), ]) @mock.patch('timy.output') @mock.patch('time.perf_counter') def test_timer_with_loops(p_perf_counter, p_output): timy_config.tracking = True LOOPS = 4", "timy_config.tracking = False @timer() def func(): pass func() p_output.assert_not_called() @mock.patch('timy.output') @mock.patch('time.perf_counter') def test_timer_include_sleeptime(p_perf_counter,", "func(): pass p_perf_counter.return_value = 1 func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed (func) for 1", "pass p_process_time.return_value = 1 func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed (func) for 1 time", "pass func() p_output.assert_not_called() @mock.patch('timy.output') @mock.patch('time.perf_counter') def test_timer_include_sleeptime(p_perf_counter, p_output): timy_config.tracking = True @timer() def", "timer from timy.settings import timy_config @mock.patch('timy.output') def test_timer_no_tracking(p_output): timy_config.tracking = False @timer() def", "time in 0.000000'), mock.call( timy_config.DEFAULT_IDENT, 'best time was 0.000000'), ]) @mock.patch('timy.output') @mock.patch('time.process_time') def", "@mock.patch('time.perf_counter') def test_timer_include_sleeptime(p_perf_counter, p_output): timy_config.tracking = True @timer() def func(): pass p_perf_counter.return_value =", "@timer() def func(): pass p_perf_counter.return_value = 1 func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed (func)", "p_perf_counter.return_value = 1 func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed (func) for {} times in", "= 4 @timer(loops=LOOPS) def func(): pass p_perf_counter.return_value = 1 func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT,", "'best time was 0.000000'), ]) @mock.patch('timy.output') @mock.patch('time.process_time') def test_timer_include_sleeptime_no(p_process_time, p_output): timy_config.tracking = True", "func(): pass p_process_time.return_value = 1 func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed (func) for 1", "p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed (func) for 1 time in 0.000000'), mock.call( timy_config.DEFAULT_IDENT, 'best", "@mock.patch('timy.output') @mock.patch('time.perf_counter') def test_timer_with_loops(p_perf_counter, p_output): timy_config.tracking = True LOOPS = 4 @timer(loops=LOOPS) def", "= 1 func() p_output.assert_has_calls([ mock.call( 
timy_config.DEFAULT_IDENT, 'executed (func) for {} times in 0.000000'.format(LOOPS)),", "4 @timer(loops=LOOPS) def func(): pass p_perf_counter.return_value = 1 func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed", "timy_config.tracking = True @timer(include_sleeptime=False) def func(): pass p_process_time.return_value = 1 func() p_output.assert_has_calls([ mock.call(", "True LOOPS = 4 @timer(loops=LOOPS) def func(): pass p_perf_counter.return_value = 1 func() p_output.assert_has_calls([", "timy.settings import timy_config @mock.patch('timy.output') def test_timer_no_tracking(p_output): timy_config.tracking = False @timer() def func(): pass", "pass p_perf_counter.return_value = 1 func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed (func) for {} times", "1 time in 0.000000'), mock.call( timy_config.DEFAULT_IDENT, 'best time was 0.000000'), ]) @mock.patch('timy.output') @mock.patch('time.process_time')", "func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed (func) for {} times in 0.000000'.format(LOOPS)), mock.call( timy_config.DEFAULT_IDENT,", "func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed (func) for 1 time in 0.000000'), mock.call( timy_config.DEFAULT_IDENT,", "= True @timer() def func(): pass p_perf_counter.return_value = 1 func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT,", "def test_timer_with_loops(p_perf_counter, p_output): timy_config.tracking = True LOOPS = 4 @timer(loops=LOOPS) def func(): pass", "test_timer_include_sleeptime(p_perf_counter, p_output): timy_config.tracking = True @timer() def func(): pass p_perf_counter.return_value = 1 func()", "import timy_config @mock.patch('timy.output') def test_timer_no_tracking(p_output): timy_config.tracking = False @timer() def func(): pass func()", "LOOPS = 4 @timer(loops=LOOPS) def func(): pass p_perf_counter.return_value = 1 func() p_output.assert_has_calls([ mock.call(", "@mock.patch('timy.output') def test_timer_no_tracking(p_output): timy_config.tracking = False @timer() def func(): pass func() p_output.assert_not_called() @mock.patch('timy.output')", "p_output): timy_config.tracking = True @timer() def func(): pass p_perf_counter.return_value = 1 func() p_output.assert_has_calls([", "test_timer_include_sleeptime_no(p_process_time, p_output): timy_config.tracking = True @timer(include_sleeptime=False) def func(): pass p_process_time.return_value = 1 func()", "time was 0.000000'), ]) @mock.patch('timy.output') @mock.patch('time.process_time') def test_timer_include_sleeptime_no(p_process_time, p_output): timy_config.tracking = True @timer(include_sleeptime=False)", "import mock from timy import timer from timy.settings import timy_config @mock.patch('timy.output') def test_timer_no_tracking(p_output):", "func(): pass p_perf_counter.return_value = 1 func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed (func) for {}", "def func(): pass p_perf_counter.return_value = 1 func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed (func) for", "timy_config.tracking = True LOOPS = 4 @timer(loops=LOOPS) def func(): pass p_perf_counter.return_value = 1", "time was 0.000000'), ]) @mock.patch('timy.output') @mock.patch('time.perf_counter') def test_timer_with_loops(p_perf_counter, p_output): timy_config.tracking = True LOOPS", "timy_config.tracking = True @timer() def func(): pass p_perf_counter.return_value = 1 func() p_output.assert_has_calls([ 
mock.call(", "mock.call( timy_config.DEFAULT_IDENT, 'executed (func) for {} times in 0.000000'.format(LOOPS)), mock.call( timy_config.DEFAULT_IDENT, 'best time", "@timer(loops=LOOPS) def func(): pass p_perf_counter.return_value = 1 func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed (func)", "<gh_stars>100-1000 from unittest import mock from timy import timer from timy.settings import timy_config", "0.000000'), mock.call( timy_config.DEFAULT_IDENT, 'best time was 0.000000'), ]) @mock.patch('timy.output') @mock.patch('time.perf_counter') def test_timer_with_loops(p_perf_counter, p_output):", "]) @mock.patch('timy.output') @mock.patch('time.process_time') def test_timer_include_sleeptime_no(p_process_time, p_output): timy_config.tracking = True @timer(include_sleeptime=False) def func(): pass", "= True LOOPS = 4 @timer(loops=LOOPS) def func(): pass p_perf_counter.return_value = 1 func()", "'best time was 0.000000'), ]) @mock.patch('timy.output') @mock.patch('time.perf_counter') def test_timer_with_loops(p_perf_counter, p_output): timy_config.tracking = True", "p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed (func) for {} times in 0.000000'.format(LOOPS)), mock.call( timy_config.DEFAULT_IDENT, 'best", "= 1 func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed (func) for 1 time in 0.000000'),", "def func(): pass func() p_output.assert_not_called() @mock.patch('timy.output') @mock.patch('time.perf_counter') def test_timer_include_sleeptime(p_perf_counter, p_output): timy_config.tracking = True", "pass p_perf_counter.return_value = 1 func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed (func) for 1 time", "@mock.patch('time.process_time') def test_timer_include_sleeptime_no(p_process_time, p_output): timy_config.tracking = True @timer(include_sleeptime=False) def func(): pass p_process_time.return_value =", "timy_config @mock.patch('timy.output') def test_timer_no_tracking(p_output): timy_config.tracking = False @timer() def func(): pass func() p_output.assert_not_called()", "= False @timer() def func(): pass func() p_output.assert_not_called() @mock.patch('timy.output') @mock.patch('time.perf_counter') def test_timer_include_sleeptime(p_perf_counter, p_output):", "@timer(include_sleeptime=False) def func(): pass p_process_time.return_value = 1 func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed (func)", "1 func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed (func) for 1 time in 0.000000'), mock.call(", "p_perf_counter.return_value = 1 func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed (func) for 1 time in", "p_output): timy_config.tracking = True @timer(include_sleeptime=False) def func(): pass p_process_time.return_value = 1 func() p_output.assert_has_calls([", "in 0.000000'), mock.call( timy_config.DEFAULT_IDENT, 'best time was 0.000000'), ]) @mock.patch('timy.output') @mock.patch('time.perf_counter') def test_timer_with_loops(p_perf_counter,", "]) @mock.patch('timy.output') @mock.patch('time.perf_counter') def test_timer_with_loops(p_perf_counter, p_output): timy_config.tracking = True LOOPS = 4 @timer(loops=LOOPS)", "def test_timer_no_tracking(p_output): timy_config.tracking = False @timer() def func(): pass func() p_output.assert_not_called() @mock.patch('timy.output') @mock.patch('time.perf_counter')", "func(): pass func() p_output.assert_not_called() @mock.patch('timy.output') 
@mock.patch('time.perf_counter') def test_timer_include_sleeptime(p_perf_counter, p_output): timy_config.tracking = True @timer()", "timy_config.DEFAULT_IDENT, 'best time was 0.000000'), ]) @mock.patch('timy.output') @mock.patch('time.process_time') def test_timer_include_sleeptime_no(p_process_time, p_output): timy_config.tracking =", "timy import timer from timy.settings import timy_config @mock.patch('timy.output') def test_timer_no_tracking(p_output): timy_config.tracking = False", "unittest import mock from timy import timer from timy.settings import timy_config @mock.patch('timy.output') def", "True @timer(include_sleeptime=False) def func(): pass p_process_time.return_value = 1 func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed", "mock.call( timy_config.DEFAULT_IDENT, 'best time was 0.000000'), ]) @mock.patch('timy.output') @mock.patch('time.perf_counter') def test_timer_with_loops(p_perf_counter, p_output): timy_config.tracking", "'executed (func) for {} times in 0.000000'.format(LOOPS)), mock.call( timy_config.DEFAULT_IDENT, 'best time was 0.000000'),", "for 1 time in 0.000000'), mock.call( timy_config.DEFAULT_IDENT, 'best time was 0.000000'), ]) @mock.patch('timy.output')", "from timy import timer from timy.settings import timy_config @mock.patch('timy.output') def test_timer_no_tracking(p_output): timy_config.tracking =", "from timy.settings import timy_config @mock.patch('timy.output') def test_timer_no_tracking(p_output): timy_config.tracking = False @timer() def func():", "0.000000'), ]) @mock.patch('timy.output') @mock.patch('time.process_time') def test_timer_include_sleeptime_no(p_process_time, p_output): timy_config.tracking = True @timer(include_sleeptime=False) def func():", "p_process_time.return_value = 1 func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed (func) for 1 time in", "func() p_output.assert_not_called() @mock.patch('timy.output') @mock.patch('time.perf_counter') def test_timer_include_sleeptime(p_perf_counter, p_output): timy_config.tracking = True @timer() def func():", "mock from timy import timer from timy.settings import timy_config @mock.patch('timy.output') def test_timer_no_tracking(p_output): timy_config.tracking", "True @timer() def func(): pass p_perf_counter.return_value = 1 func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed", "timy_config.DEFAULT_IDENT, 'executed (func) for 1 time in 0.000000'), mock.call( timy_config.DEFAULT_IDENT, 'best time was", "timy_config.DEFAULT_IDENT, 'executed (func) for {} times in 0.000000'.format(LOOPS)), mock.call( timy_config.DEFAULT_IDENT, 'best time was", "1 func() p_output.assert_has_calls([ mock.call( timy_config.DEFAULT_IDENT, 'executed (func) for {} times in 0.000000'.format(LOOPS)), mock.call(" ]
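The list above shingles a pytest module for the timy timing decorator: timy.output and the relevant clock (time.perf_counter or time.process_time) are mocked, and the tests assert on the formatted messages. Two representative tests reassembled from the fragments are shown below; the fragments also mention test_timer_with_loops and test_timer_include_sleeptime_no, which are not reproduced here.

# Partial reassembly from the fragments above: exercise timy's @timer decorator
# with the output function and the clock mocked out.
from unittest import mock

from timy import timer
from timy.settings import timy_config


@mock.patch('timy.output')
def test_timer_no_tracking(p_output):
    # With tracking disabled, nothing should be reported.
    timy_config.tracking = False

    @timer()
    def func():
        pass

    func()
    p_output.assert_not_called()


@mock.patch('timy.output')
@mock.patch('time.perf_counter')
def test_timer_include_sleeptime(p_perf_counter, p_output):
    # With a frozen clock, the elapsed time is reported as 0.000000.
    timy_config.tracking = True

    @timer()
    def func():
        pass

    p_perf_counter.return_value = 1
    func()
    p_output.assert_has_calls([
        mock.call(timy_config.DEFAULT_IDENT, 'executed (func) for 1 time in 0.000000'),
        mock.call(timy_config.DEFAULT_IDENT, 'best time was 0.000000'),
    ])
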
# -*- coding: utf-8 -*-
from uuid import uuid4
from copy import deepcopy
from datetime import timedelta

from openprocurement.auctions.core.utils import calculate_business_date
from openprocurement.auctions.appraisal.models import AppraisalAuction


def check_items_listing(self):
    self.app.authorization = ('Basic', ('broker', ''))
    data = self.initial_data.copy()

    # Auction creation
    response = self.app.post_json('/auctions', {'data': data})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(len(response.json['data']['items']), len(data['items']))

    auction_id = response.json['data']['id']
    owner_token = response.json['access']['token']
    access_header = {'X-Access-Token': str(owner_token)}
    self.app.patch_json(
        '/auctions/{}'.format(auction_id),
        {'data': {'status': 'active.tendering'}},
        headers=access_header
    )

    response = self.app.get(
        '/auctions/{}/items'.format(auction_id),
    )
    self.assertEqual(len(response.json['data']), len(data['items']))

    # Create one item and check listing
    response = self.app.post_json(
        '/auctions/{}/items'.format(auction_id),
        {'data': self.initial_item_data},
        headers=access_header
    )
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')

    response = self.app.get(
        '/auctions/{}/items'.format(auction_id),
    )
    self.assertEqual(len(response.json['data']), len(data['items']) + 1)


def check_item_creation(self):
    self.app.authorization = ('Basic', ('broker', ''))
    data = self.initial_data.copy()

    # Auction creation
    response = self.app.post_json('/auctions', {'data': data})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')

    auction_id = response.json['data']['id']
    owner_token = response.json['access']['token']
    access_header = {'X-Access-Token': str(owner_token)}
    self.auction_id = auction_id
    self.set_status('active.tendering')

    # Item creation
    response = self.app.post_json(
        '/auctions/{}/items'.format(auction_id),
        {'data': self.initial_item_data},
        headers=access_header
    )
    item_id = response.json['data']['id']
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(self.initial_item_data['id'], response.json['data']['id'])
    self.assertIn(item_id, response.headers['Location'])
    self.assertEqual(self.initial_item_data['description'], response.json["data"]["description"])
    self.assertEqual(self.initial_item_data['quantity'], response.json["data"]["quantity"])
    self.assertEqual(self.initial_item_data['address'], response.json["data"]["address"])

    # Get item
    response = self.app.get('/auctions/{}/items/{}'.format(auction_id, item_id))
    self.assertEqual(item_id, response.json['data']['id'])
    self.assertEqual(self.initial_item_data['description'], response.json["data"]["description"])
    self.assertEqual(self.initial_item_data['quantity'], response.json["data"]["quantity"])
    self.assertEqual(self.initial_item_data['address'], response.json["data"]["address"])


def check_item_patch(self):
    self.app.authorization = ('Basic', ('broker', ''))
    data = self.initial_data.copy()

    # Auction creation
    response = self.app.post_json('/auctions', {'data': data})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')

    auction_id = response.json['data']['id']
    owner_token = response.json['access']['token']
    access_header = {'X-Access-Token': str(owner_token)}
    self.auction_id = auction_id
    self.set_status('active.tendering')

    # Item creation
    response = self.app.post_json(
        '/auctions/{}/items'.format(auction_id),
        {'data': self.initial_item_data},
        headers=access_header
    )
    item_id = response.json['data']['id']
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')

    # Get item
    response = self.app.get('/auctions/{}/items/{}'.format(auction_id, item_id))
    self.assertEqual(item_id, response.json['data']['id'])
    self.assertEqual(self.initial_item_data['description'], response.json["data"]["description"])
    self.assertEqual(self.initial_item_data['quantity'], response.json["data"]["quantity"])
    self.assertEqual(self.initial_item_data['address'], response.json["data"]["address"])

    # Patch item
    patch_data = {'description': 'DESCRIPTION_' + uuid4().hex, 'id': '0*32'}
    response = self.app.patch_json(
        '/auctions/{}/items/{}'.format(auction_id, item_id),
        {'data': patch_data},
        headers=access_header
    )
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertNotEqual(patch_data['id'], response.json['data']['id'])
    self.assertEqual(patch_data['description'], response.json["data"]["description"])


def check_patch_auction_in_not_editable_statuses(self):
    self.app.authorization = ('Basic', ('broker', ''))

    # Auction creation
    data = self.initial_data.copy()
    response = self.app.post_json('/auctions', {'data': data})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')

    auction_id = response.json['data']['id']
    owner_token = response.json['access']['token']
    access_header = {'X-Access-Token': str(owner_token)}
    self.auction_id = auction_id
    self.set_status('active.tendering')

    # Item creation
    response = self.app.post_json(
        '/auctions/{}/items'.format(auction_id),
        {'data': self.initial_item_data},
        headers=access_header
    )
    item_id = response.json['data']['id']
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')

    # Change status in which you can edit auction
    desired_status = 'active.auction'
    self.set_status(desired_status)
    self.app.authorization = ('Basic', ('broker', ''))

    # Trying to create new item
    response = self.app.post_json(
        '/auctions/{}/items'.format(auction_id),
        {'data': self.initial_item_data},
        headers=access_header,
        status=403
    )
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(
        response.json['errors'][0]['description'],
        "You can't change items in this status ({})".format(desired_status)
    )

    # Trying to update new item
    response = self.app.patch_json(
        '/auctions/{}/items/{}'.format(auction_id, item_id),
        {'data': {'description': uuid4().hex}},
        headers=access_header,
        status=403
    )
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(
        response.json['errors'][0]['description'],
        "You can't change items in this status ({})".format(desired_status)
    )


def validate_change_items_after_rectification_period(self):
    self.app.authorization = ('Basic', ('broker', ''))

    # Auction creation
    data = self.initial_data.copy()
    response = self.app.post_json('/auctions', {'data': data})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')

    auction_id = response.json['data']['id']
    owner_token = response.json['access']['token']
    access_header = {'X-Access-Token': str(owner_token)}
    self.auction_id = auction_id
    self.set_status('active.tendering')

    # Item creation
    response = self.app.post_json(
        '/auctions/{}/items'.format(auction_id),
        {'data': self.initial_item_data},
        headers=access_header
    )
    item_id = response.json['data']['id']
    self.assertEqual(response.status, '201 Created')

    # Change rectification period
    fromdb = self.db.get(auction_id)
    fromdb = AppraisalAuction(fromdb)
    fromdb.tenderPeriod.startDate = calculate_business_date(
        fromdb.tenderPeriod.startDate, -timedelta(days=15), fromdb, working_days=True
    )
    fromdb.tenderPeriod.endDate = calculate_business_date(
        fromdb.tenderPeriod.startDate, timedelta(days=7), fromdb, working_days=True
    )
    fromdb = fromdb.store(self.db)
    self.assertEqual(fromdb.id, auction_id)

    # Check if items can`t be edited
    response = self.app.post_json(
        '/auctions/{}/items'.format(auction_id),
        {'data': self.initial_item_data},
        headers=access_header,
        status=403
    )
    self.assertEqual(response.json['errors'][0]['description'],
                     'You can\'t change items after rectification period')

    response = self.app.patch_json(
        '/auctions/{}/items/{}'.format(auction_id, item_id),
        {'data': {'description': uuid4().hex}},
        headers=access_header,
        status=403
    )
    self.assertEqual(response.json['errors'][0]['description'],
                     'You can\'t change items after rectification period')


def batch_create_items(self):
    self.app.authorization = ('Basic', ('broker', ''))
    data = self.initial_data.copy()
    data['items'] = [self.initial_item_data]

    # Auction creation
    response = self.app.post_json('/auctions', {'data': data})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(len(response.json['data']['items']), len(data['items']))


def batch_update_items(self):
    self.app.authorization = ('Basic', ('broker', ''))
    data = self.initial_data.copy()
    data['items'] = [self.initial_item_data]

    # Auction creation
    response = self.app.post_json('/auctions', {'data': data})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(len(response.json['data']['items']), len(data['items']))

    auction_id = response.json['data']['id']
    owner_token = response.json['access']['token']
    access_header = {'X-Access-Token': str(owner_token)}
    self.app.patch_json(
        '/auctions/{}'.format(auction_id),
        {'data': {'status': 'active.tendering'}},
        headers=access_header
    )

    # Update items with batch mode
    item_2 = deepcopy(self.initial_item_data)
    del item_2['id']
    patch_items = {'items': [self.initial_item_data, item_2]}
    response = self.app.patch_json(
        '/auctions/{}'.format(auction_id),
        {'data': patch_items},
        headers=access_header
    )
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(len(response.json['data']['items']), len(patch_items['items']))


def check_bids_invalidation(self):
    self.app.authorization = ('Basic', ('broker', ''))

    # Auction creation
    data = self.initial_data.copy()
    response = self.app.post_json('/auctions', {'data': data})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')

    auction_id = response.json['data']['id']
    owner_token = response.json['access']['token']
    access_header = {'X-Access-Token': str(owner_token)}
    self.auction_id = auction_id
    self.set_status('active.tendering')

    # Create and activate bid
    response = self.app.post_json(
        '/auctions/{}/bids'.format(auction_id),
        {'data': {'tenderers': [self.initial_organization], "status": "draft",
                  'qualified': True, 'eligible': True}}
    )
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    bidder_id = response.json['data']['id']
    bid_token = response.json['access']['token']
    self.app.patch_json(
        '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token),
        {'data': {'status': 'active'}}
    )

    # Create item
    response = self.app.post_json(
        '/auctions/{}/items'.format(auction_id),
        {'data': self.initial_item_data},
        headers=access_header
    )
    item_id = response.json['data']['id']

    # Check if bid invalidated
    response = self.app.get(
        '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token)
    )
    self.assertEqual(response.json['data']['status'], 'invalid')
    response = self.app.get('/auctions/{}'.format(auction_id))
    self.assertIn('invalidationDate', response.json['data']['rectificationPeriod'])
    invalidation_date = response.json['data']['rectificationPeriod']['invalidationDate']

    # Activate bid again and check if status changes
    self.app.patch_json(
        '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token),
        {'data': {'status': 'active'}}
    )
    response = self.app.get(
        '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token)
    )
    self.assertEqual(response.json['data']['status'], 'active')

    # Patch item
    response = self.app.patch_json(
        '/auctions/{}/items/{}'.format(auction_id, item_id),
        {'data': {}},
        headers=access_header
    )
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    response = self.app.get(
        '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token)
    )
    self.assertEqual(response.json['data']['status'], 'invalid')
    response = self.app.get('/auctions/{}'.format(auction_id))
    self.assertIn('invalidationDate', response.json['data']['rectificationPeriod'])
self.set_status('active.tendering') # Item", "'active.tendering'}}, headers=access_header ) # Item creation response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header", "'application/json') response = self.app.get( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token) ) self.assertEqual(response.json['data']['status'], 'invalid') response = self.app.get('/auctions/{}'.format(auction_id))", ") self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type, 'application/json') response = self.app.get( '/auctions/{}/items'.format(auction_id), ) self.assertEqual(len(response.json['data']), len(data['items'])", "self.assertEqual(self.initial_item_data['quantity'], response.json[\"data\"][\"quantity\"]) self.assertEqual(self.initial_item_data['address'], response.json[\"data\"][\"address\"]) def check_item_patch(self): self.app.authorization = ('Basic', ('broker', '')) data =", "= [self.initial_item_data] # Auction creation response = self.app.post_json('/auctions', {'data': data}) self.assertEqual(response.status, '201 Created')", "response = self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id, item_id), {'data': patch_data}, headers=access_header ) self.assertEqual(response.status, '200 OK') self.assertEqual(response.content_type,", "= fromdb.store(self.db) self.assertEqual(fromdb.id, auction_id) # Check if items can`t be edited response =", "Created') self.assertEqual(response.content_type, 'application/json') auction_id = response.json['data']['id'] owner_token = response.json['access']['token'] access_header = {'X-Access-Token': str(owner_token)}", "can`t be edited response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header, status=403 ) self.assertEqual(response.json['errors'][0]['description'],", "headers=access_header, status=403 ) self.assertEqual(response.json['errors'][0]['description'], 'You can\\'t change items after rectification period') def batch_create_items(self):", "check_patch_auction_in_not_editable_statuses(self): self.app.authorization = ('Basic', ('broker', '')) # Auction creation data = self.initial_data.copy() response", "can\\'t change items after rectification period') def batch_create_items(self): self.app.authorization = ('Basic', ('broker', ''))", "self.assertEqual(response.content_type, 'application/json') bidder_id = response.json['data']['id'] bid_token = response.json['access']['token'] self.app.patch_json( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token), {'data':", "{'data': self.initial_item_data}, headers=access_header ) item_id = response.json['data']['id'] self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type, 'application/json') #", "{'data': {'status': 'active.tendering'}}, headers=access_header ) # Update items with batch mode item_2 =", "'/auctions/{}'.format(auction_id), {'data': {'status': 'active.tendering'}}, headers=access_header ) response = self.app.get( '/auctions/{}/items'.format(auction_id), ) self.assertEqual(len(response.json['data']), len(data['items']))", "self.initial_item_data}, headers=access_header, status=403 ) self.assertEqual(response.json['errors'][0]['description'], 'You can\\'t change items after rectification period') response", ") self.assertEqual(response.status, '200 OK') self.assertEqual(response.content_type, 'application/json') self.assertNotEqual(patch_data['id'], 
response.json['data']['id']) self.assertEqual(patch_data['description'], response.json[\"data\"][\"description\"]) def check_patch_auction_in_not_editable_statuses(self): self.app.authorization", "{'X-Access-Token': str(owner_token)} self.app.patch_json( '/auctions/{}'.format(auction_id), {'data': {'status': 'active.tendering'}}, headers=access_header ) # Update items with", "copy import deepcopy from datetime import timedelta from openprocurement.auctions.core.utils import calculate_business_date from openprocurement.auctions.appraisal.models", "response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header, status=403 ) self.assertEqual(response.status, '403 Forbidden') self.assertEqual(response.content_type,", "# Trying to create new item response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header,", "\"You can't change items in this status ({})\".format(desired_status) ) # Trying to update", "-*- coding: utf-8 -*- from uuid import uuid4 from copy import deepcopy from", "item_id = response.json['data']['id'] self.assertEqual(response.status, '201 Created') # Change rectification period fromdb = self.db.get(auction_id)", "change items after rectification period') response = self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id, item_id), {'data': {'description': uuid4().hex}},", "('Basic', ('broker', '')) # Auction creation data = self.initial_data.copy() response = self.app.post_json('/auctions', {'data':", "= {'X-Access-Token': str(owner_token)} self.app.patch_json( '/auctions/{}'.format(auction_id), {'data': {'status': 'active.tendering'}}, headers=access_header ) response = self.app.get(", "self.assertEqual(response.content_type, 'application/json') self.assertEqual(self.initial_item_data['id'], response.json['data']['id']) self.assertIn(item_id, response.headers['Location']) self.assertEqual(self.initial_item_data['description'], response.json[\"data\"][\"description\"]) self.assertEqual(self.initial_item_data['quantity'], response.json[\"data\"][\"quantity\"]) self.assertEqual(self.initial_item_data['address'], response.json[\"data\"][\"address\"]) #", "'/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token) ) self.assertEqual(response.json['data']['status'], 'invalid') response = self.app.get('/auctions/{}'.format(auction_id)) self.assertIn('invalidationDate', response.json['data']['rectificationPeriod']) invalidation_date =", "data = self.initial_data.copy() data['items'] = [self.initial_item_data] # Auction creation response = self.app.post_json('/auctions', {'data':", "fromdb = fromdb.store(self.db) self.assertEqual(fromdb.id, auction_id) # Check if items can`t be edited response", "'/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token), {'data': {'status': 'active'}} ) response = self.app.get( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token)", "response = self.app.get('/auctions/{}/items/{}'.format(auction_id, item_id)) self.assertEqual(item_id, response.json['data']['id']) self.assertEqual(self.initial_item_data['description'], response.json[\"data\"][\"description\"]) self.assertEqual(self.initial_item_data['quantity'], response.json[\"data\"][\"quantity\"]) self.assertEqual(self.initial_item_data['address'], response.json[\"data\"][\"address\"]) def", "working_days=True ) fromdb.tenderPeriod.endDate = calculate_business_date( fromdb.tenderPeriod.startDate, 
timedelta(days=7), fromdb, working_days=True ) fromdb = fromdb.store(self.db)", "len(patch_items['items'])) def check_bids_invalidation(self): self.app.authorization = ('Basic', ('broker', '')) # Auction creation data =", "self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header ) item_id = response.json['data']['id'] # Check if bid", "def check_items_listing(self): self.app.authorization = ('Basic', ('broker', '')) data = self.initial_data.copy() # Auction creation", "data = self.initial_data.copy() response = self.app.post_json('/auctions', {'data': data}) self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type, 'application/json')", "self.assertEqual(response.status, '201 Created') # Change rectification period fromdb = self.db.get(auction_id) fromdb = AppraisalAuction(fromdb)", "response.json['access']['token'] access_header = {'X-Access-Token': str(owner_token)} self.auction_id = auction_id self.set_status('active.tendering') # Item creation response", "response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header ) item_id = response.json['data']['id'] self.assertEqual(response.status, '201", "item and check listing response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header ) self.assertEqual(response.status,", "status=403 ) self.assertEqual(response.json['errors'][0]['description'], 'You can\\'t change items after rectification period') response = self.app.patch_json(", "creation response = self.app.post_json('/auctions', {'data': data}) self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type, 'application/json') self.assertEqual(len(response.json['data']['items']), len(data['items']))", "'application/json') bidder_id = response.json['data']['id'] bid_token = response.json['access']['token'] self.app.patch_json( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token), {'data': {'status':", "headers=access_header ) self.assertEqual(response.status, '200 OK') self.assertEqual(response.content_type, 'application/json') self.assertEqual(len(response.json['data']['items']), len(patch_items['items'])) def check_bids_invalidation(self): self.app.authorization =", "uuid4().hex}}, headers=access_header, status=403 ) self.assertEqual(response.status, '403 Forbidden') self.assertEqual(response.content_type, 'application/json') self.assertEqual( response.json['errors'][0]['description'], \"You can't", "'')) data = self.initial_data.copy() data['items'] = [self.initial_item_data] # Auction creation response = self.app.post_json('/auctions',", "patch_items = {'items': [self.initial_item_data, item_2]} response = self.app.patch_json( '/auctions/{}'.format(auction_id), {'data': patch_items}, headers=access_header )", "self.assertIn('invalidationDate', response.json['data']['rectificationPeriod']) invalidation_date = response.json['data']['rectificationPeriod']['invalidationDate'] # Activate bid again and check if status", "= {'items': [self.initial_item_data, item_2]} response = self.app.patch_json( '/auctions/{}'.format(auction_id), {'data': patch_items}, headers=access_header ) self.assertEqual(response.status,", "items in this status ({})\".format(desired_status) ) def validate_change_items_after_rectification_period(self): self.app.authorization = ('Basic', ('broker', ''))", "Auction creation response = 
self.app.post_json('/auctions', {'data': data}) self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type, 'application/json') self.assertEqual(len(response.json['data']['items']),", "{'data': {'description': uuid4().hex}}, headers=access_header, status=403 ) self.assertEqual(response.json['errors'][0]['description'], 'You can\\'t change items after rectification", ") def validate_change_items_after_rectification_period(self): self.app.authorization = ('Basic', ('broker', '')) # Auction creation data =", "self.assertEqual(len(response.json['data']['items']), len(data['items'])) auction_id = response.json['data']['id'] owner_token = response.json['access']['token'] access_header = {'X-Access-Token': str(owner_token)} self.app.patch_json(", "('Basic', ('broker', '')) data = self.initial_data.copy() # Auction creation response = self.app.post_json('/auctions', {'data':", "= ('Basic', ('broker', '')) data = self.initial_data.copy() data['items'] = [self.initial_item_data] # Auction creation", "def validate_change_items_after_rectification_period(self): self.app.authorization = ('Basic', ('broker', '')) # Auction creation data = self.initial_data.copy()", "self.assertEqual(response.status, '200 OK') self.assertEqual(response.content_type, 'application/json') self.assertNotEqual(patch_data['id'], response.json['data']['id']) self.assertEqual(patch_data['description'], response.json[\"data\"][\"description\"]) def check_patch_auction_in_not_editable_statuses(self): self.app.authorization =", "= self.app.get( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token) ) self.assertEqual(response.json['data']['status'], 'invalid') response = self.app.get('/auctions/{}'.format(auction_id)) self.assertIn('invalidationDate', response.json['data']['rectificationPeriod'])", ") self.assertEqual(response.json['errors'][0]['description'], 'You can\\'t change items after rectification period') def batch_create_items(self): self.app.authorization =", "= self.app.get('/auctions/{}/items/{}'.format(auction_id, item_id)) self.assertEqual(item_id, response.json['data']['id']) self.assertEqual(self.initial_item_data['description'], response.json[\"data\"][\"description\"]) self.assertEqual(self.initial_item_data['quantity'], response.json[\"data\"][\"quantity\"]) self.assertEqual(self.initial_item_data['address'], response.json[\"data\"][\"address\"]) # Patch", "= self.app.post_json('/auctions', {'data': data}) self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type, 'application/json') self.assertEqual(len(response.json['data']['items']), len(data['items'])) auction_id =", "\"You can't change items in this status ({})\".format(desired_status) ) def validate_change_items_after_rectification_period(self): self.app.authorization =", "fromdb = AppraisalAuction(fromdb) fromdb.tenderPeriod.startDate = calculate_business_date( fromdb.tenderPeriod.startDate, -timedelta(days=15), fromdb, working_days=True ) fromdb.tenderPeriod.endDate =", ") item_id = response.json['data']['id'] self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type, 'application/json') self.assertEqual(self.initial_item_data['id'], response.json['data']['id']) self.assertIn(item_id, response.headers['Location'])", "bidder_id, bid_token), {'data': {'status': 'active'}} ) # Create item response = self.app.post_json( '/auctions/{}/items'.format(auction_id),", "= self.initial_data.copy() # Auction creation response = 
self.app.post_json('/auctions', {'data': data}) self.assertEqual(response.status, '201 Created')", "headers=access_header, status=403 ) self.assertEqual(response.status, '403 Forbidden') self.assertEqual(response.content_type, 'application/json') self.assertEqual( response.json['errors'][0]['description'], \"You can't change", "'/auctions/{}'.format(auction_id), {'data': {'status': 'active.tendering'}}, headers=access_header ) # Item creation response = self.app.post_json( '/auctions/{}/items'.format(auction_id),", "self.assertEqual(self.initial_item_data['id'], response.json['data']['id']) self.assertIn(item_id, response.headers['Location']) self.assertEqual(self.initial_item_data['description'], response.json[\"data\"][\"description\"]) self.assertEqual(self.initial_item_data['quantity'], response.json[\"data\"][\"quantity\"]) self.assertEqual(self.initial_item_data['address'], response.json[\"data\"][\"address\"]) # Get item", "{'data': self.initial_item_data}, headers=access_header, status=403 ) self.assertEqual(response.json['errors'][0]['description'], 'You can\\'t change items after rectification period')", "[self.initial_item_data, item_2]} response = self.app.patch_json( '/auctions/{}'.format(auction_id), {'data': patch_items}, headers=access_header ) self.assertEqual(response.status, '200 OK')", "self.assertEqual(self.initial_item_data['description'], response.json[\"data\"][\"description\"]) self.assertEqual(self.initial_item_data['quantity'], response.json[\"data\"][\"quantity\"]) self.assertEqual(self.initial_item_data['address'], response.json[\"data\"][\"address\"]) # Get item response = self.app.get('/auctions/{}/items/{}'.format(auction_id, item_id))", ") fromdb.tenderPeriod.endDate = calculate_business_date( fromdb.tenderPeriod.startDate, timedelta(days=7), fromdb, working_days=True ) fromdb = fromdb.store(self.db) self.assertEqual(fromdb.id,", "= response.json['access']['token'] access_header = {'X-Access-Token': str(owner_token)} self.auction_id = auction_id self.set_status('active.tendering') # Create and", "{'data': {'status': 'active'}} ) response = self.app.get( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token) ) self.assertEqual(response.json['data']['status'], 'active')", "self.assertEqual(response.status, '200 OK') self.assertEqual(response.content_type, 'application/json') self.assertEqual(len(response.json['data']['items']), len(patch_items['items'])) def check_bids_invalidation(self): self.app.authorization = ('Basic', ('broker',", "'application/json') auction_id = response.json['data']['id'] owner_token = response.json['access']['token'] access_header = {'X-Access-Token': str(owner_token)} self.auction_id =", "self.set_status(desired_status) self.app.authorization = ('Basic', ('broker', '')) # Trying to create new item response", "response.json['data']['rectificationPeriod']['invalidationDate'] # Activate bid again and check if status changes self.app.patch_json( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id,", "item_2 = deepcopy(self.initial_item_data) del item_2['id'] patch_items = {'items': [self.initial_item_data, item_2]} response = self.app.patch_json(", "response.json[\"data\"][\"address\"]) # Patch item patch_data = {'description': 'DESCRIPTION_' + uuid4().hex, 'id': '0*32'} response", "= self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header ) item_id = response.json['data']['id'] # Check if", "= response.json['data']['id'] owner_token = 
response.json['access']['token'] access_header = {'X-Access-Token': str(owner_token)} self.app.patch_json( '/auctions/{}'.format(auction_id), {'data': {'status':", "access_header = {'X-Access-Token': str(owner_token)} self.app.patch_json( '/auctions/{}'.format(auction_id), {'data': {'status': 'active.tendering'}}, headers=access_header ) # Item", "'0*32'} response = self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id, item_id), {'data': patch_data}, headers=access_header ) self.assertEqual(response.status, '200 OK')", "change items in this status ({})\".format(desired_status) ) # Trying to update new item", "'201 Created') self.assertEqual(response.content_type, 'application/json') self.assertEqual(self.initial_item_data['id'], response.json['data']['id']) self.assertIn(item_id, response.headers['Location']) self.assertEqual(self.initial_item_data['description'], response.json[\"data\"][\"description\"]) self.assertEqual(self.initial_item_data['quantity'], response.json[\"data\"][\"quantity\"]) self.assertEqual(self.initial_item_data['address'],", "self.assertEqual(self.initial_item_data['address'], response.json[\"data\"][\"address\"]) # Patch item patch_data = {'description': 'DESCRIPTION_' + uuid4().hex, 'id': '0*32'}", "def check_item_creation(self): self.app.authorization = ('Basic', ('broker', '')) data = self.initial_data.copy() # Auction creation", "creation response = self.app.post_json('/auctions', {'data': data}) self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type, 'application/json') auction_id =", "= ('Basic', ('broker', '')) # Auction creation data = self.initial_data.copy() response = self.app.post_json('/auctions',", "self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header, status=403 ) self.assertEqual(response.status, '403 Forbidden') self.assertEqual(response.content_type, 'application/json') self.assertEqual(", "bid_token = response.json['access']['token'] self.app.patch_json( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token), {'data': {'status': 'active'}} ) # Create", "{'description': 'DESCRIPTION_' + uuid4().hex, 'id': '0*32'} response = self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id, item_id), {'data': patch_data},", "import timedelta from openprocurement.auctions.core.utils import calculate_business_date from openprocurement.auctions.appraisal.models import AppraisalAuction def check_items_listing(self): self.app.authorization", "= response.json['data']['id'] # Check if bid invalidated response = self.app.get( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token)", "period fromdb = self.db.get(auction_id) fromdb = AppraisalAuction(fromdb) fromdb.tenderPeriod.startDate = calculate_business_date( fromdb.tenderPeriod.startDate, -timedelta(days=15), fromdb,", "self.assertEqual(item_id, response.json['data']['id']) self.assertEqual(self.initial_item_data['description'], response.json[\"data\"][\"description\"]) self.assertEqual(self.initial_item_data['quantity'], response.json[\"data\"][\"quantity\"]) self.assertEqual(self.initial_item_data['address'], response.json[\"data\"][\"address\"]) # Patch item patch_data =", "Check if items can`t be edited response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header,", "item_id)) self.assertEqual(item_id, response.json['data']['id']) self.assertEqual(self.initial_item_data['description'], 
response.json[\"data\"][\"description\"]) self.assertEqual(self.initial_item_data['quantity'], response.json[\"data\"][\"quantity\"]) self.assertEqual(self.initial_item_data['address'], response.json[\"data\"][\"address\"]) # Patch item patch_data", "status=403 ) self.assertEqual(response.json['errors'][0]['description'], 'You can\\'t change items after rectification period') def batch_create_items(self): self.app.authorization", "deepcopy(self.initial_item_data) del item_2['id'] patch_items = {'items': [self.initial_item_data, item_2]} response = self.app.patch_json( '/auctions/{}'.format(auction_id), {'data':", "response.json['errors'][0]['description'], \"You can't change items in this status ({})\".format(desired_status) ) def validate_change_items_after_rectification_period(self): self.app.authorization", "utf-8 -*- from uuid import uuid4 from copy import deepcopy from datetime import", "'200 OK') self.assertEqual(response.content_type, 'application/json') self.assertEqual(len(response.json['data']['items']), len(patch_items['items'])) def check_bids_invalidation(self): self.app.authorization = ('Basic', ('broker', ''))", "auction_id self.set_status('active.tendering') # Item creation response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header )", "response = self.app.get( '/auctions/{}/items'.format(auction_id), ) self.assertEqual(len(response.json['data']), len(data['items']) + 1) def check_item_creation(self): self.app.authorization =", "self.assertEqual(item_id, response.json['data']['id']) self.assertEqual(self.initial_item_data['description'], response.json[\"data\"][\"description\"]) self.assertEqual(self.initial_item_data['quantity'], response.json[\"data\"][\"quantity\"]) self.assertEqual(self.initial_item_data['address'], response.json[\"data\"][\"address\"]) def check_item_patch(self): self.app.authorization = ('Basic',", "({})\".format(desired_status) ) def validate_change_items_after_rectification_period(self): self.app.authorization = ('Basic', ('broker', '')) # Auction creation data", "bid invalidated response = self.app.get( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token) ) self.assertEqual(response.json['data']['status'], 'invalid') response =", "'You can\\'t change items after rectification period') def batch_create_items(self): self.app.authorization = ('Basic', ('broker',", "str(owner_token)} self.auction_id = auction_id self.set_status('active.tendering') # Create and activate bid response = self.app.post_json(", "self.db.get(auction_id) fromdb = AppraisalAuction(fromdb) fromdb.tenderPeriod.startDate = calculate_business_date( fromdb.tenderPeriod.startDate, -timedelta(days=15), fromdb, working_days=True ) fromdb.tenderPeriod.endDate", "bid_token) ) self.assertEqual(response.json['data']['status'], 'invalid') response = self.app.get('/auctions/{}'.format(auction_id)) self.assertIn('invalidationDate', response.json['data']['rectificationPeriod']) invalidation_date = response.json['data']['rectificationPeriod']['invalidationDate'] #", "item_2['id'] patch_items = {'items': [self.initial_item_data, item_2]} response = self.app.patch_json( '/auctions/{}'.format(auction_id), {'data': patch_items}, headers=access_header", "('broker', '')) data = self.initial_data.copy() data['items'] = [self.initial_item_data] # Auction creation response =", "auction_id = response.json['data']['id'] owner_token = response.json['access']['token'] access_header = {'X-Access-Token': 
str(owner_token)} self.auction_id = auction_id", "'201 Created') self.assertEqual(response.content_type, 'application/json') auction_id = response.json['data']['id'] owner_token = response.json['access']['token'] access_header = {'X-Access-Token':", "response.json['data']['rectificationPeriod']) invalidation_date = response.json['data']['rectificationPeriod']['invalidationDate'] # Activate bid again and check if status changes", "response.json['errors'][0]['description'], \"You can't change items in this status ({})\".format(desired_status) ) # Trying to", "'/auctions/{}/items/{}'.format(auction_id, item_id), {'data': patch_data}, headers=access_header ) self.assertEqual(response.status, '200 OK') self.assertEqual(response.content_type, 'application/json') self.assertNotEqual(patch_data['id'], response.json['data']['id'])", ") # Item creation response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header ) item_id", "items after rectification period') def batch_create_items(self): self.app.authorization = ('Basic', ('broker', '')) data =", "patch_data}, headers=access_header ) self.assertEqual(response.status, '200 OK') self.assertEqual(response.content_type, 'application/json') self.assertNotEqual(patch_data['id'], response.json['data']['id']) self.assertEqual(patch_data['description'], response.json[\"data\"][\"description\"]) def", "create new item response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header, status=403 ) self.assertEqual(response.status,", "= ('Basic', ('broker', '')) data = self.initial_data.copy() # Auction creation response = self.app.post_json('/auctions',", "= self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id, item_id), {'data': {}}, headers=access_header ) self.assertEqual(response.status, '200 OK') self.assertEqual(response.content_type, 'application/json')", "response.json[\"data\"][\"address\"]) def check_item_patch(self): self.app.authorization = ('Basic', ('broker', '')) data = self.initial_data.copy() # Auction", "'application/json') self.assertEqual( response.json['errors'][0]['description'], \"You can't change items in this status ({})\".format(desired_status) ) #", "self.assertEqual(response.json['errors'][0]['description'], 'You can\\'t change items after rectification period') def batch_create_items(self): self.app.authorization = ('Basic',", "= response.json['access']['token'] access_header = {'X-Access-Token': str(owner_token)} self.app.patch_json( '/auctions/{}'.format(auction_id), {'data': {'status': 'active.tendering'}}, headers=access_header )", "one item and check listing response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header )", "self.assertEqual(self.initial_item_data['description'], response.json[\"data\"][\"description\"]) self.assertEqual(self.initial_item_data['quantity'], response.json[\"data\"][\"quantity\"]) self.assertEqual(self.initial_item_data['address'], response.json[\"data\"][\"address\"]) # Patch item patch_data = {'description': 'DESCRIPTION_'", "to update new item response = self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id, item_id), {'data': {'description': uuid4().hex}}, headers=access_header,", "self.app.get( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token) ) self.assertEqual(response.json['data']['status'], 'invalid') response = 
self.app.get('/auctions/{}'.format(auction_id)) self.assertIn('invalidationDate', response.json['data']['rectificationPeriod']) invalidation_date", "bid response = self.app.post_json( '/auctions/{}/bids'.format(auction_id), {'data': {'tenderers': [self.initial_organization], \"status\": \"draft\", 'qualified': True, 'eligible':", "response.json['data']['id'] self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type, 'application/json') self.assertEqual(self.initial_item_data['id'], response.json['data']['id']) self.assertIn(item_id, response.headers['Location']) self.assertEqual(self.initial_item_data['description'], response.json[\"data\"][\"description\"]) self.assertEqual(self.initial_item_data['quantity'],", "Change status in which you can edit auction desired_status = 'active.auction' self.set_status(desired_status) self.app.authorization", "self.app.post_json('/auctions', {'data': data}) self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type, 'application/json') self.assertEqual(len(response.json['data']['items']), len(data['items'])) auction_id = response.json['data']['id']", "response.json['access']['token'] access_header = {'X-Access-Token': str(owner_token)} self.app.patch_json( '/auctions/{}'.format(auction_id), {'data': {'status': 'active.tendering'}}, headers=access_header ) response", "= deepcopy(self.initial_item_data) del item_2['id'] patch_items = {'items': [self.initial_item_data, item_2]} response = self.app.patch_json( '/auctions/{}'.format(auction_id),", "status=403 ) self.assertEqual(response.status, '403 Forbidden') self.assertEqual(response.content_type, 'application/json') self.assertEqual( response.json['errors'][0]['description'], \"You can't change items", "({})\".format(desired_status) ) # Trying to update new item response = self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id, item_id),", "self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header, status=403 ) self.assertEqual(response.json['errors'][0]['description'], 'You can\\'t change items after", "'/auctions/{}/bids'.format(auction_id), {'data': {'tenderers': [self.initial_organization], \"status\": \"draft\", 'qualified': True, 'eligible': True}} ) self.assertEqual(response.status, '201", "# Activate bid again and check if status changes self.app.patch_json( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token),", "= self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header, status=403 ) self.assertEqual(response.status, '403 Forbidden') self.assertEqual(response.content_type, 'application/json')", "def batch_create_items(self): self.app.authorization = ('Basic', ('broker', '')) data = self.initial_data.copy() data['items'] = [self.initial_item_data]", "bid again and check if status changes self.app.patch_json( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token), {'data': {'status':", "'application/json') response = self.app.get( '/auctions/{}/items'.format(auction_id), ) self.assertEqual(len(response.json['data']), len(data['items']) + 1) def check_item_creation(self): self.app.authorization", "= AppraisalAuction(fromdb) fromdb.tenderPeriod.startDate = calculate_business_date( fromdb.tenderPeriod.startDate, -timedelta(days=15), fromdb, working_days=True ) fromdb.tenderPeriod.endDate = calculate_business_date(", "def check_patch_auction_in_not_editable_statuses(self): 
self.app.authorization = ('Basic', ('broker', '')) # Auction creation data = self.initial_data.copy()", "# Auction creation data = self.initial_data.copy() response = self.app.post_json('/auctions', {'data': data}) self.assertEqual(response.status, '201", "self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id, item_id), {'data': patch_data}, headers=access_header ) self.assertEqual(response.status, '200 OK') self.assertEqual(response.content_type, 'application/json') self.assertNotEqual(patch_data['id'],", "which you can edit auction desired_status = 'active.auction' self.set_status(desired_status) self.app.authorization = ('Basic', ('broker',", "= calculate_business_date( fromdb.tenderPeriod.startDate, -timedelta(days=15), fromdb, working_days=True ) fromdb.tenderPeriod.endDate = calculate_business_date( fromdb.tenderPeriod.startDate, timedelta(days=7), fromdb,", "changes self.app.patch_json( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token), {'data': {'status': 'active'}} ) response = self.app.get( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id,", "= response.json['access']['token'] access_header = {'X-Access-Token': str(owner_token)} self.auction_id = auction_id self.set_status('active.tendering') # Item creation", "self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header ) item_id = response.json['data']['id'] self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type,", "can't change items in this status ({})\".format(desired_status) ) def validate_change_items_after_rectification_period(self): self.app.authorization = ('Basic',", "def batch_update_items(self): self.app.authorization = ('Basic', ('broker', '')) data = self.initial_data.copy() data['items'] = [self.initial_item_data]", "self.auction_id = auction_id self.set_status('active.tendering') # Create and activate bid response = self.app.post_json( '/auctions/{}/bids'.format(auction_id),", "'/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header ) item_id = response.json['data']['id'] self.assertEqual(response.status, '201 Created') # Change", "response = self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id, item_id), {'data': {'description': uuid4().hex}}, headers=access_header, status=403 ) self.assertEqual(response.json['errors'][0]['description'], 'You", ") self.assertEqual(response.json['data']['status'], 'invalid') response = self.app.get('/auctions/{}'.format(auction_id)) self.assertIn('invalidationDate', response.json['data']['rectificationPeriod']) invalidation_date = response.json['data']['rectificationPeriod']['invalidationDate'] # Activate", "calculate_business_date( fromdb.tenderPeriod.startDate, -timedelta(days=15), fromdb, working_days=True ) fromdb.tenderPeriod.endDate = calculate_business_date( fromdb.tenderPeriod.startDate, timedelta(days=7), fromdb, working_days=True", "self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type, 'application/json') self.assertEqual(self.initial_item_data['id'], response.json['data']['id']) self.assertIn(item_id, response.headers['Location']) self.assertEqual(self.initial_item_data['description'], response.json[\"data\"][\"description\"]) self.assertEqual(self.initial_item_data['quantity'], response.json[\"data\"][\"quantity\"])", ") self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type, 'application/json') bidder_id = 
response.json['data']['id'] bid_token = response.json['access']['token'] self.app.patch_json(", "Create item response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header ) item_id = response.json['data']['id']", "# Change rectification period fromdb = self.db.get(auction_id) fromdb = AppraisalAuction(fromdb) fromdb.tenderPeriod.startDate = calculate_business_date(", "'/auctions/{}/items/{}'.format(auction_id, item_id), {'data': {}}, headers=access_header ) self.assertEqual(response.status, '200 OK') self.assertEqual(response.content_type, 'application/json') response =", "self.set_status('active.tendering') # Create and activate bid response = self.app.post_json( '/auctions/{}/bids'.format(auction_id), {'data': {'tenderers': [self.initial_organization],", "item_id)) self.assertEqual(item_id, response.json['data']['id']) self.assertEqual(self.initial_item_data['description'], response.json[\"data\"][\"description\"]) self.assertEqual(self.initial_item_data['quantity'], response.json[\"data\"][\"quantity\"]) self.assertEqual(self.initial_item_data['address'], response.json[\"data\"][\"address\"]) def check_item_patch(self): self.app.authorization =", "self.assertEqual(response.json['errors'][0]['description'], 'You can\\'t change items after rectification period') response = self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id, item_id),", "desired_status = 'active.auction' self.set_status(desired_status) self.app.authorization = ('Basic', ('broker', '')) # Trying to create", "self.auction_id = auction_id self.set_status('active.tendering') # Item creation response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data},", "check_bids_invalidation(self): self.app.authorization = ('Basic', ('broker', '')) # Auction creation data = self.initial_data.copy() response", "# Check if items can`t be edited response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data},", ") self.assertEqual(response.status, '403 Forbidden') self.assertEqual(response.content_type, 'application/json') self.assertEqual( response.json['errors'][0]['description'], \"You can't change items in", "batch mode item_2 = deepcopy(self.initial_item_data) del item_2['id'] patch_items = {'items': [self.initial_item_data, item_2]} response", "# Trying to update new item response = self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id, item_id), {'data': {'description':", "len(data['items'])) # Create one item and check listing response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data':", ") self.assertEqual(response.json['errors'][0]['description'], 'You can\\'t change items after rectification period') response = self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id,", "{'status': 'active.tendering'}}, headers=access_header ) # Item creation response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data},", "'application/json') # Change status in which you can edit auction desired_status = 'active.auction'", "bid_token), {'data': {'status': 'active'}} ) response = self.app.get( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token) ) self.assertEqual(response.json['data']['status'],", "'eligible': True}} ) self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type, 'application/json') bidder_id = response.json['data']['id'] bid_token =", "# Patch 
item response = self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id, item_id), {'data': {}}, headers=access_header ) self.assertEqual(response.status,", "data = self.initial_data.copy() # Auction creation response = self.app.post_json('/auctions', {'data': data}) self.assertEqual(response.status, '201", "Forbidden') self.assertEqual(response.content_type, 'application/json') self.assertEqual( response.json['errors'][0]['description'], \"You can't change items in this status ({})\".format(desired_status)", "fromdb.tenderPeriod.startDate, timedelta(days=7), fromdb, working_days=True ) fromdb = fromdb.store(self.db) self.assertEqual(fromdb.id, auction_id) # Check if", "import deepcopy from datetime import timedelta from openprocurement.auctions.core.utils import calculate_business_date from openprocurement.auctions.appraisal.models import", "'application/json') self.assertEqual( response.json['errors'][0]['description'], \"You can't change items in this status ({})\".format(desired_status) ) def", "rectification period fromdb = self.db.get(auction_id) fromdb = AppraisalAuction(fromdb) fromdb.tenderPeriod.startDate = calculate_business_date( fromdb.tenderPeriod.startDate, -timedelta(days=15),", ") item_id = response.json['data']['id'] self.assertEqual(response.status, '201 Created') # Change rectification period fromdb =", "fromdb, working_days=True ) fromdb = fromdb.store(self.db) self.assertEqual(fromdb.id, auction_id) # Check if items can`t", "fromdb.store(self.db) self.assertEqual(fromdb.id, auction_id) # Check if items can`t be edited response = self.app.post_json(", "self.app.patch_json( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token), {'data': {'status': 'active'}} ) response = self.app.get( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id,", "self.app.post_json('/auctions', {'data': data}) self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type, 'application/json') auction_id = response.json['data']['id'] owner_token =", "{'items': [self.initial_item_data, item_2]} response = self.app.patch_json( '/auctions/{}'.format(auction_id), {'data': patch_items}, headers=access_header ) self.assertEqual(response.status, '200", "= auction_id self.set_status('active.tendering') # Item creation response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header", "can\\'t change items after rectification period') response = self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id, item_id), {'data': {'description':", "item response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header, status=403 ) self.assertEqual(response.status, '403 Forbidden')", "{'data': data}) self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type, 'application/json') self.assertEqual(len(response.json['data']['items']), len(data['items'])) auction_id = response.json['data']['id'] owner_token", "Get item response = self.app.get('/auctions/{}/items/{}'.format(auction_id, item_id)) self.assertEqual(item_id, response.json['data']['id']) self.assertEqual(self.initial_item_data['description'], response.json[\"data\"][\"description\"]) self.assertEqual(self.initial_item_data['quantity'], response.json[\"data\"][\"quantity\"]) self.assertEqual(self.initial_item_data['address'],", "auction_id) # Check if items can`t be edited response = self.app.post_json( 
'/auctions/{}/items'.format(auction_id), {'data':", "+ uuid4().hex, 'id': '0*32'} response = self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id, item_id), {'data': patch_data}, headers=access_header )", "{'data': patch_data}, headers=access_header ) self.assertEqual(response.status, '200 OK') self.assertEqual(response.content_type, 'application/json') self.assertNotEqual(patch_data['id'], response.json['data']['id']) self.assertEqual(patch_data['description'], response.json[\"data\"][\"description\"])", "can't change items in this status ({})\".format(desired_status) ) # Trying to update new", "OK') self.assertEqual(response.content_type, 'application/json') self.assertEqual(len(response.json['data']['items']), len(patch_items['items'])) def check_bids_invalidation(self): self.app.authorization = ('Basic', ('broker', '')) #", "data}) self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type, 'application/json') auction_id = response.json['data']['id'] owner_token = response.json['access']['token'] access_header", "{'X-Access-Token': str(owner_token)} self.auction_id = auction_id self.set_status('active.tendering') # Create and activate bid response =", "item_id), {'data': {'description': uuid4().hex}}, headers=access_header, status=403 ) self.assertEqual(response.status, '403 Forbidden') self.assertEqual(response.content_type, 'application/json') self.assertEqual(", "'200 OK') self.assertEqual(response.content_type, 'application/json') response = self.app.get( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token) ) self.assertEqual(response.json['data']['status'], 'invalid')", "and check listing response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header ) self.assertEqual(response.status, '201", ") self.assertEqual(response.status, '200 OK') self.assertEqual(response.content_type, 'application/json') response = self.app.get( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token) )", "'/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header ) self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type, 'application/json') response = self.app.get(", "headers=access_header ) item_id = response.json['data']['id'] self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type, 'application/json') # Change status", "response = self.app.patch_json( '/auctions/{}'.format(auction_id), {'data': patch_items}, headers=access_header ) self.assertEqual(response.status, '200 OK') self.assertEqual(response.content_type, 'application/json')", "import uuid4 from copy import deepcopy from datetime import timedelta from openprocurement.auctions.core.utils import", "self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header ) item_id = response.json['data']['id'] self.assertEqual(response.status, '201 Created') #", "items after rectification period') response = self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id, item_id), {'data': {'description': uuid4().hex}}, headers=access_header,", "self.app.patch_json( '/auctions/{}'.format(auction_id), {'data': {'status': 'active.tendering'}}, headers=access_header ) # Update items with batch mode", "\"draft\", 'qualified': True, 'eligible': True}} ) self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type, 'application/json') 
bidder_id =", "{'tenderers': [self.initial_organization], \"status\": \"draft\", 'qualified': True, 'eligible': True}} ) self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type,", "{'X-Access-Token': str(owner_token)} self.app.patch_json( '/auctions/{}'.format(auction_id), {'data': {'status': 'active.tendering'}}, headers=access_header ) response = self.app.get( '/auctions/{}/items'.format(auction_id),", "'active.tendering'}}, headers=access_header ) # Update items with batch mode item_2 = deepcopy(self.initial_item_data) del", "auction_id = response.json['data']['id'] owner_token = response.json['access']['token'] access_header = {'X-Access-Token': str(owner_token)} self.app.patch_json( '/auctions/{}'.format(auction_id), {'data':", "Activate bid again and check if status changes self.app.patch_json( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token), {'data':", "self.assertEqual(patch_data['description'], response.json[\"data\"][\"description\"]) def check_patch_auction_in_not_editable_statuses(self): self.app.authorization = ('Basic', ('broker', '')) # Auction creation data", ") response = self.app.get( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token) ) self.assertEqual(response.json['data']['status'], 'active') # Patch item", "AppraisalAuction(fromdb) fromdb.tenderPeriod.startDate = calculate_business_date( fromdb.tenderPeriod.startDate, -timedelta(days=15), fromdb, working_days=True ) fromdb.tenderPeriod.endDate = calculate_business_date( fromdb.tenderPeriod.startDate,", "self.assertEqual(len(response.json['data']), len(data['items'])) # Create one item and check listing response = self.app.post_json( '/auctions/{}/items'.format(auction_id),", "new item response = self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id, item_id), {'data': {'description': uuid4().hex}}, headers=access_header, status=403 )", "update new item response = self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id, item_id), {'data': {'description': uuid4().hex}}, headers=access_header, status=403", "listing response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header ) self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type,", "bidder_id = response.json['data']['id'] bid_token = response.json['access']['token'] self.app.patch_json( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token), {'data': {'status': 'active'}}", "calculate_business_date from openprocurement.auctions.appraisal.models import AppraisalAuction def check_items_listing(self): self.app.authorization = ('Basic', ('broker', '')) data", "Auction creation data = self.initial_data.copy() response = self.app.post_json('/auctions', {'data': data}) self.assertEqual(response.status, '201 Created')", "timedelta(days=7), fromdb, working_days=True ) fromdb = fromdb.store(self.db) self.assertEqual(fromdb.id, auction_id) # Check if items", "access_header = {'X-Access-Token': str(owner_token)} self.auction_id = auction_id self.set_status('active.tendering') # Create and activate bid", "self.assertEqual(self.initial_item_data['quantity'], response.json[\"data\"][\"quantity\"]) self.assertEqual(self.initial_item_data['address'], response.json[\"data\"][\"address\"]) # Get item response = self.app.get('/auctions/{}/items/{}'.format(auction_id, item_id)) self.assertEqual(item_id, response.json['data']['id'])", "{'data': data}) 
self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type, 'application/json') auction_id = response.json['data']['id'] owner_token = response.json['access']['token']", "new item response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header, status=403 ) self.assertEqual(response.status, '403", "('broker', '')) # Trying to create new item response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data':", "response.json['data']['id'] bid_token = response.json['access']['token'] self.app.patch_json( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token), {'data': {'status': 'active'}} ) #", "{'status': 'active'}} ) response = self.app.get( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token) ) self.assertEqual(response.json['data']['status'], 'active') #", "again and check if status changes self.app.patch_json( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token), {'data': {'status': 'active'}}", "fromdb = self.db.get(auction_id) fromdb = AppraisalAuction(fromdb) fromdb.tenderPeriod.startDate = calculate_business_date( fromdb.tenderPeriod.startDate, -timedelta(days=15), fromdb, working_days=True", "item_id = response.json['data']['id'] self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type, 'application/json') # Change status in which", "'/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header ) item_id = response.json['data']['id'] # Check if bid invalidated", "Check if bid invalidated response = self.app.get( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token) ) self.assertEqual(response.json['data']['status'], 'invalid')", "if bid invalidated response = self.app.get( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token) ) self.assertEqual(response.json['data']['status'], 'invalid') response", "bidder_id, bid_token) ) self.assertEqual(response.json['data']['status'], 'active') # Patch item response = self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id, item_id),", "= self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id, item_id), {'data': {'description': uuid4().hex}}, headers=access_header, status=403 ) self.assertEqual(response.status, '403 Forbidden')", "self.assertEqual(response.json['data']['status'], 'active') # Patch item response = self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id, item_id), {'data': {}}, headers=access_header", "uuid import uuid4 from copy import deepcopy from datetime import timedelta from openprocurement.auctions.core.utils", "self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type, 'application/json') # Change status in which you can edit", "response = self.app.get( '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token) ) self.assertEqual(response.json['data']['status'], 'invalid') response = self.app.get('/auctions/{}'.format(auction_id)) self.assertIn('invalidationDate',", "openprocurement.auctions.core.utils import calculate_business_date from openprocurement.auctions.appraisal.models import AppraisalAuction def check_items_listing(self): self.app.authorization = ('Basic', ('broker',", "Created') # Change rectification period fromdb = self.db.get(auction_id) fromdb = AppraisalAuction(fromdb) fromdb.tenderPeriod.startDate =", "self.assertNotEqual(patch_data['id'], 
response.json['data']['id']) self.assertEqual(patch_data['description'], response.json[\"data\"][\"description\"]) def check_patch_auction_in_not_editable_statuses(self): self.app.authorization = ('Basic', ('broker', '')) # Auction", "= 'active.auction' self.set_status(desired_status) self.app.authorization = ('Basic', ('broker', '')) # Trying to create new", "'/auctions/{}'.format(auction_id), {'data': patch_items}, headers=access_header ) self.assertEqual(response.status, '200 OK') self.assertEqual(response.content_type, 'application/json') self.assertEqual(len(response.json['data']['items']), len(patch_items['items'])) def", "self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type, 'application/json') self.assertEqual(len(response.json['data']['items']), len(data['items'])) def batch_update_items(self): self.app.authorization = ('Basic', ('broker',", "Trying to update new item response = self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id, item_id), {'data': {'description': uuid4().hex}},", "'active') # Patch item response = self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id, item_id), {'data': {}}, headers=access_header )", "self.initial_data.copy() data['items'] = [self.initial_item_data] # Auction creation response = self.app.post_json('/auctions', {'data': data}) self.assertEqual(response.status,", "fromdb.tenderPeriod.startDate = calculate_business_date( fromdb.tenderPeriod.startDate, -timedelta(days=15), fromdb, working_days=True ) fromdb.tenderPeriod.endDate = calculate_business_date( fromdb.tenderPeriod.startDate, timedelta(days=7),", "from openprocurement.auctions.core.utils import calculate_business_date from openprocurement.auctions.appraisal.models import AppraisalAuction def check_items_listing(self): self.app.authorization = ('Basic',", "status ({})\".format(desired_status) ) # Trying to update new item response = self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id,", "period') def batch_create_items(self): self.app.authorization = ('Basic', ('broker', '')) data = self.initial_data.copy() data['items'] =", "'/auctions/{}'.format(auction_id), {'data': {'status': 'active.tendering'}}, headers=access_header ) # Update items with batch mode item_2", "items with batch mode item_2 = deepcopy(self.initial_item_data) del item_2['id'] patch_items = {'items': [self.initial_item_data,", "response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header ) item_id = response.json['data']['id'] # Check", "('broker', '')) data = self.initial_data.copy() # Auction creation response = self.app.post_json('/auctions', {'data': data})", "this status ({})\".format(desired_status) ) # Trying to update new item response = self.app.patch_json(", ") # Trying to update new item response = self.app.patch_json( '/auctions/{}/items/{}'.format(auction_id, item_id), {'data':", "after rectification period') def batch_create_items(self): self.app.authorization = ('Basic', ('broker', '')) data = self.initial_data.copy()", "self.initial_data.copy() # Auction creation response = self.app.post_json('/auctions', {'data': data}) self.assertEqual(response.status, '201 Created') self.assertEqual(response.content_type,", ") # Update items with batch mode item_2 = deepcopy(self.initial_item_data) del item_2['id'] patch_items", "'/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token) ) self.assertEqual(response.json['data']['status'], 'invalid') response = 
self.app.get('/auctions/{}'.format(auction_id)) self.assertIn('invalidationDate', response.json['data']['rectificationPeriod']) self.assertNotEqual(invalidation_date, response.json['data']['rectificationPeriod']['invalidationDate'])", "{'status': 'active.tendering'}}, headers=access_header ) response = self.app.get( '/auctions/{}/items'.format(auction_id), ) self.assertEqual(len(response.json['data']), len(data['items'])) # Create", "check listing response = self.app.post_json( '/auctions/{}/items'.format(auction_id), {'data': self.initial_item_data}, headers=access_header ) self.assertEqual(response.status, '201 Created')" ]
# -*- coding: utf-8 -*-
import base64
import json

import scrapy
from scrapy import Request


class ProxyList(scrapy.Spider):
    """Scrape proxy-list.org and keep only proxies that actually respond."""

    name = "proxy_list"
    allowed_domains = ["proxy-list.org"]

    def start_requests(self):
        # The listing is paginated; pages 1-3 are fetched.
        for i in range(1, 4):
            print(i)
            yield Request('https://proxy-list.org/english/index.php?p=%s' % i)

    def parse(self, response):
        rows = response.xpath('//div[@class="table-wrap"]//ul')
        for item in rows:
            # The host:port pair is base64-encoded inside an inline <script>.
            proxy = item.xpath('.//li[@class="proxy"]//script').extract()[0]
            proxy = base64.b64decode(proxy.split("'")[1]).decode('utf-8')
            ip = proxy.split(':')[0]
            print(proxy)
            protocol = item.xpath('.//li[@class="https"]/text()').extract()
            protocol = 'http' if len(protocol) > 0 else 'https'
            url = '%s://httpbin.org/ip' % protocol
            proxy = '%s://%s' % (protocol, proxy)
            meta = {
                'ip': ip,
                'proxy': proxy,
                'dont_retry': True,
                'download_timeout': 15
            }
            # Probe httpbin.org through the proxy to verify it is usable.
            yield Request(
                url,
                callback=self.check_available,
                meta=meta,
                dont_filter=True
            )

    def check_available(self, response):
        ip = response.meta['ip']
        # Only yield the proxy if the request really went out through it.
        if ip == json.loads(response.text)['origin']:
            yield {'proxy': response.meta['proxy']}

import json

import colored
from colored import stylize
from pyautogui import typewrite


def log(string, color, font="slant", figlet=False):
    print(stylize(string, colored.fg(color)), end=" ")


def readtasks(task_id):
    # Load the saved task list, let the user edit the matching task,
    # then write everything back to disk.
    with open("tasks.json", "r") as f:
        tasklist = f.read()
    tasklist = json.loads(tasklist)
    tasks = tasklist['todo']
    for k, v in tasks.items():
        if str(k) != str(task_id):
            continue
        log("-------------- Editing Task: " + v['name'] + " ---------------",
            "spring_green_3a")
        print()
        data = edit(v)
        tasks[k] = data
    tasklist = json.dumps(tasklist)
    with open("tasks.json", "w") as f:
        f.write(tasklist)


def edit(taskdata):
    allowed_details = ["priority", "name"]  # TODO: Add Alarm
    edited_task = {}
    # Existing fields: pre-type the current value and read the replacement.
    for k, v in taskdata.items():
        allowed_details.remove(k)
        log(k + ": ", "turquoise_2")
        typewrite(v)
        user_val = input()
        edited_task[k] = user_val
    # Any allowed fields the task does not have yet are asked for from scratch.
    for details in allowed_details:
        log(details + ": ", "turquoise_2")
        user_val = input()
        edited_task[details] = user_val
    return edited_task

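readtasks() expects a tasks.json file with a top-level "todo" mapping of task ids to task records; the field names beyond "name" are inferred from allowed_details. The setup below is purely illustrative, with made-up data, to show the expected shape and call.

import json

# Illustrative only: the exact schema is inferred from the code above.
example = {"todo": {"1": {"name": "buy milk", "priority": "high"}}}
with open("tasks.json", "w") as f:
    json.dump(example, f)

readtasks(1)  # prompts for a new name and priority for task 1
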
# C library wrapper: prefer the compiled `fractension` extension and fall
# back to loading the bundled DLL through ctypes.
try:
    from fractension import jCalc, mCalc
    print("Using fractension...")
except ImportError:
    import sys
    import os
    import ctypes

    # Directory containing the current file.
    __curdir = os.path.split(os.path.realpath(__file__))[0]
    __dll = None
    if sys.platform == "win32":
        # Windows
        if "32 bit" in sys.version:
            __dll = ctypes.CDLL(os.path.join(__curdir, "calc.dll"))
        else:
            # 64-bit
            __dll = ctypes.CDLL(os.path.join(__curdir, "calc64.dll"))

    __jCalc = __dll.jCalc
    __jCalc.argtypes = [ctypes.c_double * 13, ]
    __jCalc.restype = ctypes.c_int
    __mCalc = __dll.mCalc
    __mCalc.argtypes = [ctypes.c_double * 13, ]
    __mCalc.restype = ctypes.c_int

    def jCalc(args):
        # Marshal the 13 input values into a C double[13] buffer.
        k = (13 * ctypes.c_double)()
        for i in range(13):
            k[i] = args[i]
        res = __jCalc(k)
        r = k[0]
        del k
        return res, r

    def mCalc(args):
        k = (13 * ctypes.c_double)()
        for i in range(13):
            k[i] = args[i]
        res = __mCalc(k)
        r = k[0]
        del k
        return res, r

    print("Using Dll...")

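Both wrappers copy a 13-element Python sequence into a C double[13], call into the DLL (or the compiled fractension extension when it imports), and return the integer result together with the first element of the buffer, which the C side may have overwritten. A hypothetical call, with made-up input values, looks like this:

values = [float(i) for i in range(13)]
status, first = jCalc(values)  # mCalc uses the same calling convention
print(status, first)
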
# toolbox/metrics/metrics_base.py
from abc import ABC

from toolbox.trackable import Trackable


class Metrics(Trackable, ABC):
    pass

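Metrics defines no behaviour of its own; it only ties Trackable and ABC together so concrete metrics share one base. Since Trackable's interface is not shown in this snippet, the subclass below is a hypothetical illustration only, with names that are not taken from the toolbox package.

class Accuracy(Metrics):
    # Hypothetical sketch: field and method names are assumptions.
    def update(self, correct, total):
        self.value = correct / total if total else 0.0
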
# abstract Book class is provided.
# Write just the MyBook class
from abc import ABCMeta, abstractmethod


class Book(object, metaclass=ABCMeta):
    def __init__(self, title, author):
        self.title = title
        self.author = author

    @abstractmethod
    def display(self):
        pass


class MyBook(Book):
    def __init__(self, title, author, price):
        super().__init__(title, author)
        self.price = price

    def display(self):
        print('Title: ' + self.title)
        print('Author: ' + self.author)
        print('Price: ' + str(self.price))


new_novel = MyBook(input(), input(), int(input()))
new_novel.display()

[ "1.0 params[0][1] = 0.0 params[1][0] = 0.0 params[1][1] = 1.0 return (params) def", "par_lin, i, eref) par_lin[i]=cur_par if last_mut-cur_mi>eps: par_lin[i]=param_opt last_mut=cur_mi converged=False else: par_lin[i]=cur_par #print(\"Iterations \"+str(it))", "return - cc_ref_flt/torch.sqrt(cc_ref*cc_flt) def precompute_mean_squared_error(Ref_uint8_ravel): pass def mean_squared_error(Ref_uint8_ravel, Flt_uint8_ravel, mse_ref): return torch.sum((Ref_uint8_ravel -", "in all # *copies or substantial portions of the Software. # * #", "linear_par[i]=c a_mat=to_matrix_blocked(linear_par) linear_par[i]=d b_mat=to_matrix_blocked(linear_par) mats = move_data(torch.stack((a_mat, b_mat))) mi_a, mi_b = compute_metric(ref_sup_ravel, flt_stack,", "flt_mu_11 = (flt_mom[5]/flt_mom[0]*1.0)-(flt_avg_01*flt_avg_10) ref_avg_10 = ref_mom[1]/ref_mom[0] ref_avg_01 = ref_mom[3]/ref_mom[0] ref_mu_20 = (ref_mom[2]/ref_mom[0]*1.0)-(ref_avg_10*ref_avg_10) ref_mu_02", "return input_data def to_cuda(input_data): return input_data.cuda(non_blocking=True) def batch_transform(images, pars): img_warped = kornia.geometry.warp_affine(images, pars,", "= hist[1:-1,1:-1] return hist def precompute_mutual_information(Ref_uint8_ravel): href = torch.histc(Ref_uint8_ravel, bins=256) href /= Ref_uint8_ravel.numel()", "return hist def precompute_mutual_information(Ref_uint8_ravel): href = torch.histc(Ref_uint8_ravel, bins=256) href /= Ref_uint8_ravel.numel() href=href[href>0.000000000000001] eref=(torch.sum(href*(torch.log2(href))))*-1", "cur_rng = rng[i] param_opt, cur_mi = optimize_goldsearch(cur_par, cur_rng, ref_sup_ravel, flt_stack, par_lin, i, eref)", "torch compute_metric = None precompute_metric = None device = \"cpu\" ref_vals = None", "= par_lin[i] cur_rng = rng[i] param_opt, cur_mi = optimize_goldsearch(cur_par, cur_rng, ref_sup_ravel, flt_stack, par_lin,", "pydicom import cv2 import numpy as np import math import glob import time", "Ref_img[Ref_img==-2000]=1 flt = pydicom.dcmread(j) Flt_img = torch.tensor(flt.pixel_array.astype(np.int16), dtype=torch.int16, device=device) Ref_img = (Ref_img -", "name = \"t%02d\" % (i) pool.append(Process(target=compute, args=(CT[start:end], PET[start:end], name, curr_res, i, k))) for", "# * # *THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY", "torch.empty(6, device=device) l = torch.arange(img.shape[0], device=device) moments[0] = torch.sum(img) # m00 moments[1] =", "j = ij[1] ref = pydicom.dcmread(i) Ref_img = torch.tensor(ref.pixel_array.astype(np.int16), dtype=torch.int16, device=device) Ref_img[Ref_img==-2000]=1 flt", "Flt_uint8) end_time= time.time() final_img.append(f_img.cpu()) it_time = (end_time - start_time) times.append(it_time) t=t+it_time df =", "restriction, including without limitation the rights # *to use, copy, modify, merge, publish,", "to_cuda(input_data): return input_data.cuda(non_blocking=True) def batch_transform(images, pars): img_warped = kornia.geometry.warp_affine(images, pars, mode=\"nearest\", dsize=(images.shape[2], images.shape[3]))", "= torch.sum(img) # m00 moments[1] = torch.sum(img * l) # m10 moments[2] =", "< mi_b): end=d best_mi = mi_a linear_par[i]=c else: start=c best_mi = mi_b linear_par[i]=d", "be # counted in the last bin, and not as an outlier. 
for", "return (mat_params) def estimate_initial(Ref_uint8, Flt_uint8, params): ref_mom = compute_moments(Ref_uint8) flt_mom = compute_moments(Flt_uint8) flt_avg_10", "glob import time import pandas as pd from torch.multiprocessing import Pool, Process, set_start_method", "move_data(params_trans)) return (flt_transform) def save_data(OUT_STAK, name, res_path): for i in range(len(OUT_STAK)): b=name[i].split('/') c=b.pop()", "the following conditions: # * # *The above copyright notice and this permission", "= 0.0 params[1][1] = 1.0 return (params) def my_squared_hist2d_t(sample, bins, smin, smax): D,", "- Flt_img.min())*255 Flt_uint8 = Flt_img.round().type(torch.uint8) start_time = time.time() f_img = register_images(Ref_uint8, Flt_uint8) end_time=", "(ref_mom[4]/ref_mom[0]*1.0)-(ref_avg_01*ref_avg_01) ref_mu_11 = (ref_mom[5]/ref_mom[0]*1.0)-(ref_avg_01*ref_avg_10) params[0][2] = ref_mom[1]/ref_mom[0]-flt_mom[1]/flt_mom[0] params[1][2] = ref_mom[3]/ref_mom[0] - flt_mom[3]/flt_mom[0] rho_flt=0.5*torch.atan((2.0*flt_mu_11)/(flt_mu_20-flt_mu_02))", "compute_moments(Flt_uint8) flt_avg_10 = flt_mom[1]/flt_mom[0] flt_avg_01 = flt_mom[3]/flt_mom[0] flt_mu_20 = (flt_mom[2]/flt_mom[0]*1.0)-(flt_avg_10*flt_avg_10) flt_mu_02 = (flt_mom[4]/flt_mom[0]*1.0)-(flt_avg_01*flt_avg_01)", "patient to analyze', default=0, type=int) parser.add_argument(\"-cp\", \"--ct_path\", nargs='?', help='Path of the CT Images',", "free of charge, to any person obtaining a copy # *of this software", "default='./') parser.add_argument(\"-pp\", \"--pet_path\", nargs='?', help='Path of the PET Images', default='./') parser.add_argument(\"-rp\", \"--res_path\", nargs='?',", "* l) # m10 moments[2] = torch.sum(img * (l**2)) # m20 moments[3] =", "ref_mom[3]/ref_mom[0] ref_mu_20 = (ref_mom[2]/ref_mom[0]*1.0)-(ref_avg_10*ref_avg_10) ref_mu_02 = (ref_mom[4]/ref_mom[0]*1.0)-(ref_avg_01*ref_avg_01) ref_mu_11 = (ref_mom[5]/ref_mom[0]*1.0)-(ref_avg_01*ref_avg_10) params[0][2] = ref_mom[1]/ref_mom[0]-flt_mom[1]/flt_mom[0]", "\"MSE\": compute_metric = compute_mse precompute_metric = precompute_mean_squared_error else: print(\"Unsupported metric!\") exit() global device", "help='Path of the PET Images', default='./') parser.add_argument(\"-rp\", \"--res_path\", nargs='?', help='Path of the Results',", "= precompute_mutual_information elif args.metric == \"CC\": compute_metric = compute_cc precompute_metric = precompute_cross_correlation elif", "i, eref) par_lin[i]=cur_par if last_mut-cur_mi>eps: par_lin[i]=param_opt last_mut=cur_mi converged=False else: par_lin[i]=cur_par #print(\"Iterations \"+str(it)) return", "******************************************/ import os import pydicom import cv2 import numpy as np import math", "end_time= time.time() final_img.append(f_img.cpu()) it_time = (end_time - start_time) times.append(it_time) t=t+it_time df = pd.DataFrame([t,", "params[1][0] = 0.0 params[1][1] = 1.0 return (params) def my_squared_hist2d_t(sample, bins, smin, smax):", "be included in all # *copies or substantial portions of the Software. 
#", "= 0.0 params[1][0] = 0.0 params[1][1] = 1.0 return (params) def my_squared_hist2d_t(sample, bins,", "return img_warped def compute_moments(img): moments = torch.empty(6, device=device) l = torch.arange(img.shape[0], device=device) moments[0]", "end = images_per_thread * (i + 1) if i < num_threads - 1", "= 0.000005 last_mut=100000.0 it=0 while(not converged): converged=True it=it+1 for i in range(par_lin.numel()): cur_par", "= torch.empty((2,3), device=device) estimate_initial(Ref_uint8, Flt_uint8, params) params_cpu = params.cpu() rng = torch.tensor([80.0, 80.0,", "OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # *OUT OF OR IN CONNECTION", "mse_ref) mse_b = mean_squared_error(ref_img, flt_warped[1].ravel(), mse_ref) return mse_a.cpu(), mse_b.cpu() def optimize_goldsearch(par, rng, ref_sup_ravel,", "device=device) estimate_initial(Ref_uint8, Flt_uint8, params) params_cpu = params.cpu() rng = torch.tensor([80.0, 80.0, 1.0]) pa", "parser.add_argument(\"-t\", \"--thread_number\", nargs='?', help='Number of // threads', default=1, type=int) parser.add_argument(\"-px\", \"--prefix\", nargs='?', help='prefix", "t_mat) mi_a = mutual_information(ref_img, flt_warped[0].ravel(), eref) mi_b = mutual_information(ref_img, flt_warped[1].ravel(), eref) return torch.exp(-mi_a).cpu(),", "proper matrix hist = hist.reshape((nbin, nbin)) hist = hist.float() # Remove outliers (indices", "- Flt_uint8_ravel)**2) def compute_mi(ref_img, flt_imgs, t_mats, eref): flt_warped = batch_transform(flt_imgs, t_mats) #flt_img =", "(t_id)) df.to_csv(df_path, index=False) times_df.to_csv(times_df_path, index=False) save_data(final_img,PET,curr_res) def compute_wrapper(args, num_threads=1): config=args.config for k in", "outliers (indices 0 and -1 for each dimension). hist = hist[1:-1,1:-1] return hist", "flt_warped = batch_transform(flt_imgs, t_mats) mse_a = mean_squared_error(ref_img, flt_warped[0].ravel(), mse_ref) mse_b = mean_squared_error(ref_img, flt_warped[1].ravel(),", "metric!\") exit() global device device = args.device compute_wrapper(args, num_threads) print(\"Faber Powell python is", "points are on the rightmost edge. on_edge = (sample[i, :] == edges[-1]) #", "#cos_teta mat_params[0][1]=torch.sqrt(1-(vector_params[2]**2)) mat_params[1][0]=-mat_params[0][1] return (mat_params) def estimate_initial(Ref_uint8, Flt_uint8, params): ref_mom = compute_moments(Ref_uint8) flt_mom", "return (end+start)/2, best_mi def optimize_powell(rng, par_lin, ref_sup_ravel, flt_stack, eref): converged = False eps", "t_par = torch.unsqueeze(par, dim=0) img_warped = kornia.geometry.warp_affine(tmp_img, t_par, mode=\"nearest\", dsize=(tmp_img.shape[2], tmp_img.shape[3])) return img_warped", "SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # *SOFTWARE. 
# ******************************************/", "= \"cpu\" ref_vals = None move_data = None def no_transfer(input_data): return input_data def", "'CC', 'MSE'], default='MI') parser.add_argument(\"-dvc\", \"--device\", nargs='?', help='Target device', choices=['cpu', 'cuda'], default='cpu') args =", "WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # *SOFTWARE.", "pool = [] curr_prefix = args.prefix+str(k) curr_ct = os.path.join(curr_prefix,args.ct_path) curr_pet = os.path.join(curr_prefix,args.pet_path) curr_res", "par): tmp_img = image.reshape((1, 1, *image.shape)).float() t_par = torch.unsqueeze(par, dim=0) img_warped = kornia.geometry.warp_affine(tmp_img,", "moments[4] = torch.sum(img * (l.reshape((img.shape[0], 1)))**2 ) # m02 moments[5] = torch.sum(img *", "are on the rightmost edge. on_edge = (sample[i, :] == edges[-1]) # Shift", "Flt_img.min())*255 Flt_uint8 = Flt_img.round().type(torch.uint8) start_time = time.time() f_img = register_images(Ref_uint8, Flt_uint8) end_time= time.time()", "args.metric == \"CC\": compute_metric = compute_cc precompute_metric = precompute_cross_correlation elif args.metric == \"MSE\":", "and assign it to the hist = torch.bincount(xy, None, minlength=nbin*nbin) # Shape into", "hist = hist[1:-1,1:-1] return hist def precompute_mutual_information(Ref_uint8_ravel): href = torch.histc(Ref_uint8_ravel, bins=256) href /=", "= torch.histc(Ref_uint8_ravel, bins=256) href /= Ref_uint8_ravel.numel() href=href[href>0.000000000000001] eref=(torch.sum(href*(torch.log2(href))))*-1 return eref def mutual_information(Ref_uint8_ravel, Flt_uint8_ravel,", "(i + 1) if i < num_threads - 1 else len(CT) name =", "rights # *to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #", "else: mat_params[0][0]=vector_params[2] #cos_teta mat_params[1][1]=vector_params[2] #cos_teta mat_params[0][1]=torch.sqrt(1-(vector_params[2]**2)) mat_params[1][0]=-mat_params[0][1] return (mat_params) def estimate_initial(Ref_uint8, Flt_uint8, params):", "LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # *LIABILITY, WHETHER IN AN ACTION", "if vector_params[2] > 1 or vector_params[2] < -1: mat_params[0][0]=1 #cos_teta mat_params[1][1]=1 #cos_teta mat_params[0][1]=0", "Process, set_start_method import struct import statistics import argparse import kornia import torch compute_metric", "href /= Ref_uint8_ravel.numel() href=href[href>0.000000000000001] eref=(torch.sum(href*(torch.log2(href))))*-1 return eref def mutual_information(Ref_uint8_ravel, Flt_uint8_ravel, eref): if(device ==", "pa = torch.tensor([params_cpu[0][2],params_cpu[1][2],params_cpu[0][0]]) Ref_uint8_ravel = Ref_uint8.ravel().double() eref = precompute_metric(Ref_uint8_ravel) flt_u = torch.unsqueeze(Flt_uint8, dim=0).float()", "torch.sum(img * l.reshape((img.shape[0], 1)) ) # m01 moments[4] = torch.sum(img * (l.reshape((img.shape[0], 1)))**2", "assign it to the hist = torch.bincount(xy, None, minlength=nbin*nbin) # Shape into a", "estimate_initial(Ref_uint8, Flt_uint8, params) params_cpu = params.cpu() rng = torch.tensor([80.0, 80.0, 1.0]) pa =", "def compute_cc(ref_img, flt_imgs, t_mats, cc_ref): flt_warped = batch_transform(flt_imgs, t_mats) cc_a = cross_correlation(ref_img, flt_warped[0].ravel(),", "import torch compute_metric = None precompute_metric = None device = \"cpu\" ref_vals =", "a copy # *of this software and associated documentation files (the \"Software\"), to", "ARISING FROM, # *OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE", "modify, merge, publish, distribute, sublicense, and/or sell # *copies of the 
Software, and to permit persons to whom the Software is
# *furnished to do so, subject to the following conditions:
# *
# *The above copyright notice and this permission notice shall be included in all
# *copies or substantial portions of the Software.
# *
# *THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# *IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# *FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# *AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# *LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# *OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# *SOFTWARE.
# ******************************************/
import os
import pydicom
import cv2
import numpy as np
import math
import glob
import time
import pandas as pd
from torch.multiprocessing import Pool, Process, set_start_method
import struct
import statistics
import argparse
import kornia
import torch

compute_metric = None
precompute_metric = None
device = "cpu"
ref_vals = None
move_data = None


def no_transfer(input_data):
    return input_data


def to_cuda(input_data):
    return input_data.cuda(non_blocking=True)


def batch_transform(images, pars):
    img_warped = kornia.geometry.warp_affine(images, pars, mode="nearest", dsize=(images.shape[2], images.shape[3]))
    return img_warped


def transform(image, par):
    tmp_img = image.reshape((1, 1, *image.shape)).float()
    t_par = torch.unsqueeze(par, dim=0)
    img_warped = kornia.geometry.warp_affine(tmp_img, t_par, mode="nearest", dsize=(tmp_img.shape[2], tmp_img.shape[3]))
    return img_warped


def compute_moments(img):
    moments = torch.empty(6, device=device)
    l = torch.arange(img.shape[0], device=device)
    moments[0] = torch.sum(img)  # m00
    moments[1] = torch.sum(img * l)  # m10
    moments[2] = torch.sum(img * (l**2))  # m20
    moments[3] = torch.sum(img * l.reshape((img.shape[0], 1)) )  # m01
    moments[4] = torch.sum(img * (l.reshape((img.shape[0], 1)))**2 )  # m02
    moments[5] = torch.sum(img * l * (l.reshape((img.shape[0], 1))) )  # m11
    return moments


def to_matrix_blocked(vector_params):
    mat_params=torch.empty((2,3))
    mat_params[0][2]=vector_params[0]
    mat_params[1][2]=vector_params[1]
    if vector_params[2] > 1 or vector_params[2] < -1:
        mat_params[0][0]=1 #cos_teta
        mat_params[1][1]=1 #cos_teta
        mat_params[0][1]=0
        mat_params[1][0]=0
    else:
        mat_params[0][0]=vector_params[2] #cos_teta
        mat_params[1][1]=vector_params[2] #cos_teta
        mat_params[0][1]=torch.sqrt(1-(vector_params[2]**2))
        mat_params[1][0]=-mat_params[0][1]
    return (mat_params)


def estimate_initial(Ref_uint8, Flt_uint8, params):
    ref_mom = compute_moments(Ref_uint8)
    flt_mom = compute_moments(Flt_uint8)
    flt_avg_10 = flt_mom[1]/flt_mom[0]
    flt_avg_01 = flt_mom[3]/flt_mom[0]
    flt_mu_20 = (flt_mom[2]/flt_mom[0]*1.0)-(flt_avg_10*flt_avg_10)
    flt_mu_02 = (flt_mom[4]/flt_mom[0]*1.0)-(flt_avg_01*flt_avg_01)
    flt_mu_11 = (flt_mom[5]/flt_mom[0]*1.0)-(flt_avg_01*flt_avg_10)
    ref_avg_10 = ref_mom[1]/ref_mom[0]
    ref_avg_01 = ref_mom[3]/ref_mom[0]
    ref_mu_20 = (ref_mom[2]/ref_mom[0]*1.0)-(ref_avg_10*ref_avg_10)
    ref_mu_02 = (ref_mom[4]/ref_mom[0]*1.0)-(ref_avg_01*ref_avg_01)
    ref_mu_11 = (ref_mom[5]/ref_mom[0]*1.0)-(ref_avg_01*ref_avg_10)
    params[0][2] = ref_mom[1]/ref_mom[0]-flt_mom[1]/flt_mom[0]
    params[1][2] = ref_mom[3]/ref_mom[0] - flt_mom[3]/flt_mom[0]
    rho_flt=0.5*torch.atan((2.0*flt_mu_11)/(flt_mu_20-flt_mu_02))
    rho_ref=0.5*torch.atan((2.0*ref_mu_11)/(ref_mu_20-ref_mu_02))
    delta_rho=rho_ref-rho_flt
    roundness=(flt_mom[2]/flt_mom[0]) / (flt_mom[4]/flt_mom[0])
    if torch.abs(roundness-1.0)>=0.3:
        params[0][0]= torch.cos(delta_rho)
        params[0][1] = -torch.sin(delta_rho)
        params[1][0] = torch.sin(delta_rho)
        params[1][1] = torch.cos(delta_rho)
    else:
        params[0][0]= 1.0
        params[0][1] = 0.0
        params[1][0] = 0.0
        params[1][1] = 1.0
    return (params)


def my_squared_hist2d_t(sample, bins, smin, smax):
    D, N = sample.shape
    edges = torch.linspace(smin, smax, bins + 1, device=device)
    nbin = edges.shape[0] + 1
    # Compute the bin number each sample falls into.
    Ncount = D*[None]
    for i in range(D):
        Ncount[i] = torch.searchsorted(edges, sample[i, :], right=True)
    # Using digitize, values that fall on an edge are put in the right bin.
    # For the rightmost bin, we want values equal to the right edge to be
    # counted in the last bin, and not as an outlier.
    for i in range(D):
        # Find which points are on the rightmost edge.
        on_edge = (sample[i, :] == edges[-1])
        # Shift these points one bin to the left.
        Ncount[i][on_edge] -= 1
    # Compute the sample indices in the flattened histogram matrix.
    xy = Ncount[0]*nbin+Ncount[1]
    # Compute the number of repetitions in xy and assign it to the
    hist = torch.bincount(xy, None, minlength=nbin*nbin)
    # Shape into a proper matrix
    hist = hist.reshape((nbin, nbin))
    hist = hist.float()
    # Remove outliers (indices 0 and -1 for each dimension).
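    # nbin = bins + 2, so index 0 and the last index along each axis are the
    # under/overflow slots produced by searchsorted; slicing them away below
    # leaves a (bins, bins) joint histogram of the two intensity vectors.
    # Illustrative shape check (not part of the pipeline):
    #   my_squared_hist2d_t(torch.randint(0, 256, (2, 4096)).float(), 256, 0, 255)
    # returns a tensor of shape (256, 256).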
hist = hist[1:-1,1:-1] return hist def precompute_mutual_information(Ref_uint8_ravel):", "= pd.DataFrame(times,columns=['Test'+str(patient_id)])#+str(config)accel_id.get_config())]) df_path = os.path.join(curr_res,'Time_powll_%02d.csv' % (t_id)) times_df_path = os.path.join(curr_res,'Img_powll_%02d.csv' % (t_id)) df.to_csv(df_path,", "i in range(par_lin.numel()): cur_par = par_lin[i] cur_rng = rng[i] param_opt, cur_mi = optimize_goldsearch(cur_par,", "1))) # m11 return moments def to_matrix_blocked(vector_params): mat_params=torch.empty((2,3)) mat_params[0][2]=vector_params[0] mat_params[1][2]=vector_params[1] if vector_params[2] >", "in range(par_lin.numel()): cur_par = par_lin[i] cur_rng = rng[i] param_opt, cur_mi = optimize_goldsearch(cur_par, cur_rng,", "mat_params[1][2]=vector_params[1] if vector_params[2] > 1 or vector_params[2] < -1: mat_params[0][0]=1 #cos_teta mat_params[1][1]=1 #cos_teta", "parser.add_argument(\"-px\", \"--prefix\", nargs='?', help='prefix Path of patients folder', default='./') parser.add_argument(\"-im\", \"--image_dimension\", nargs='?', help='Target", "None precompute_metric = None device = \"cpu\" ref_vals = None move_data = None", "None, minlength=nbin*nbin) # Shape into a proper matrix hist = hist.reshape((nbin, nbin)) hist", "b=name[i].split('/') c=b.pop() d=c.split('.') cv2.imwrite(os.path.join(res_path, d[0][0:2]+str(int(d[0][2:5]))+'.png'), kornia.tensor_to_image(OUT_STAK[i].cpu().byte())) #Creare cartelle def compute(CT, PET, name, curr_res,", "= ij[1] ref = pydicom.dcmread(i) Ref_img = torch.tensor(ref.pixel_array.astype(np.int16), dtype=torch.int16, device=device) Ref_img[Ref_img==-2000]=1 flt =", "OTHER # *LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING", "os.makedirs(curr_res,exist_ok=True) CT=glob.glob(curr_ct+'/*dcm') PET=glob.glob(curr_pet+'/*dcm') PET.sort() CT.sort() assert len(CT) == len(PET) images_per_thread = len(CT) //", "mutual_information(Ref_uint8_ravel, Flt_uint8_ravel, eref): if(device == \"cuda\"): idx_joint = torch.stack((Ref_uint8_ravel, Flt_uint8_ravel)).long() j_h_init = torch.sparse.IntTensor(idx_joint,", "USE OR OTHER DEALINGS IN THE # *SOFTWARE. 
# ******************************************/ import os import", "torch.sum(Flt_uint8_ravel * Flt_uint8_ravel) return - cc_ref_flt/torch.sqrt(cc_ref*cc_flt) def precompute_mean_squared_error(Ref_uint8_ravel): pass def mean_squared_error(Ref_uint8_ravel, Flt_uint8_ravel, mse_ref):", "torch.stack((Ref_uint8_ravel, Flt_uint8_ravel)).long() j_h_init = torch.sparse.IntTensor(idx_joint, ref_vals, torch.Size([hist_dim, hist_dim])).to_dense()/Ref_uint8_ravel.numel() else: idx_joint = torch.stack((Ref_uint8_ravel, Flt_uint8_ravel))", "help='Number of // threads', default=1, type=int) parser.add_argument(\"-px\", \"--prefix\", nargs='?', help='prefix Path of patients", "Software is # *furnished to do so, subject to the following conditions: #", "t_par, mode=\"nearest\", dsize=(tmp_img.shape[2], tmp_img.shape[3])) return img_warped def compute_moments(img): moments = torch.empty(6, device=device) l", "= precompute_mean_squared_error else: print(\"Unsupported metric!\") exit() global device device = args.device compute_wrapper(args, num_threads)", "flt_imgs, t_mats, cc_ref): flt_warped = batch_transform(flt_imgs, t_mats) cc_a = cross_correlation(ref_img, flt_warped[0].ravel(), cc_ref) cc_b", "def compute_wrapper(args, num_threads=1): config=args.config for k in range(args.offset, args.patient): pool = [] curr_prefix", "num_threads - 1 else len(CT) name = \"t%02d\" % (i) pool.append(Process(target=compute, args=(CT[start:end], PET[start:end],", "mean_squared_error(ref_img, flt_warped[1].ravel(), mse_ref) return mse_a.cpu(), mse_b.cpu() def optimize_goldsearch(par, rng, ref_sup_ravel, flt_stack, linear_par, i,", "CONTRACT, TORT OR OTHERWISE, ARISING FROM, # *OUT OF OR IN CONNECTION WITH", "= 256 dim = 512 def main(): parser = argparse.ArgumentParser(description='Iron software for IR", "c,ij in enumerate(zip(CT, PET)): i = ij[0] j = ij[1] ref = pydicom.dcmread(i)", "dsize=(images.shape[2], images.shape[3])) return img_warped def transform(image, par): tmp_img = image.reshape((1, 1, *image.shape)).float() t_par", "l = torch.arange(img.shape[0], device=device) moments[0] = torch.sum(img) # m00 moments[1] = torch.sum(img *", "parser.add_argument(\"-cp\", \"--ct_path\", nargs='?', help='Path of the CT Images', default='./') parser.add_argument(\"-pp\", \"--pet_path\", nargs='?', help='Path", "permission notice shall be included in all # *copies or substantial portions of", "*Permission is hereby granted, free of charge, to any person obtaining a copy", "into. Ncount = D*[None] for i in range(D): Ncount[i] = torch.searchsorted(edges, sample[i, :],", "import pandas as pd from torch.multiprocessing import Pool, Process, set_start_method import struct import", "images dimensions', default=512, type=int) parser.add_argument(\"-c\", \"--config\", nargs='?', help='prefix Path of patients folder', default='./')", "def register_images(Ref_uint8, Flt_uint8): params = torch.empty((2,3), device=device) estimate_initial(Ref_uint8, Flt_uint8, params) params_cpu = params.cpu()", "cv2 import numpy as np import math import glob import time import pandas", "Ref_uint8.ravel().double() eref = precompute_metric(Ref_uint8_ravel) flt_u = torch.unsqueeze(Flt_uint8, dim=0).float() flt_stack = torch.stack((flt_u, flt_u)) optimal_params", "*copies or substantial portions of the Software. 
# * # *THE SOFTWARE IS", "params) params_cpu = params.cpu() rng = torch.tensor([80.0, 80.0, 1.0]) pa = torch.tensor([params_cpu[0][2],params_cpu[1][2],params_cpu[0][0]]) Ref_uint8_ravel", "limitation the rights # *to use, copy, modify, merge, publish, distribute, sublicense, and/or", "precompute_metric if args.metric == \"MI\": compute_metric = compute_mi precompute_metric = precompute_mutual_information elif args.metric", "def precompute_mean_squared_error(Ref_uint8_ravel): pass def mean_squared_error(Ref_uint8_ravel, Flt_uint8_ravel, mse_ref): return torch.sum((Ref_uint8_ravel - Flt_uint8_ravel)**2) def compute_mi(ref_img,", "curr_res = os.path.join(\"\",args.res_path) os.makedirs(curr_res,exist_ok=True) CT=glob.glob(curr_ct+'/*dcm') PET=glob.glob(curr_pet+'/*dcm') PET.sort() CT.sort() assert len(CT) == len(PET) images_per_thread", "0.0 it_time = 0.0 hist_dim = 256 dim = 512 global ref_vals ref_vals", "optimize_powell(rng, par_lin, ref_sup_ravel, flt_stack, eref): converged = False eps = 0.000005 last_mut=100000.0 it=0", "WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # *IMPLIED, INCLUDING BUT NOT LIMITED", "255)/Ref_uint8_ravel.numel() j_h = j_h_init[j_h_init>0.000000000000001] entropy=(torch.sum(j_h*(torch.log2(j_h))))*-1 hflt=torch.sum(j_h_init,axis=0) hflt=hflt[hflt>0.000000000000001] eflt=(torch.sum(hflt*(torch.log2(hflt))))*-1 mutualinfo=eref+eflt-entropy return(mutualinfo) def precompute_cross_correlation(Ref_uint8_ravel): return", "and -1 for each dimension). hist = hist[1:-1,1:-1] return hist def precompute_mutual_information(Ref_uint8_ravel): href", "len(PET) images_per_thread = len(CT) // num_threads print(images_per_thread) for i in range(num_threads): start =", "points one bin to the left. Ncount[i][on_edge] -= 1 # Compute the sample", "NO EVENT SHALL THE # *AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY", "Flt_uint8_ravel, mse_ref): return torch.sum((Ref_uint8_ravel - Flt_uint8_ravel)**2) def compute_mi(ref_img, flt_imgs, t_mats, eref): flt_warped =", "tmp_img.shape[3])) return img_warped def compute_moments(img): moments = torch.empty(6, device=device) l = torch.arange(img.shape[0], device=device)", "return moments def to_matrix_blocked(vector_params): mat_params=torch.empty((2,3)) mat_params[0][2]=vector_params[0] mat_params[1][2]=vector_params[1] if vector_params[2] > 1 or vector_params[2]", "t_mats, mse_ref): flt_warped = batch_transform(flt_imgs, t_mats) mse_a = mean_squared_error(ref_img, flt_warped[0].ravel(), mse_ref) mse_b =", "parser.add_argument(\"-rp\", \"--res_path\", nargs='?', help='Path of the Results', default='./') parser.add_argument(\"-t\", \"--thread_number\", nargs='?', help='Number of", "cc_ref_flt = torch.sum(Ref_uint8_ravel * Flt_uint8_ravel) cc_flt = torch.sum(Flt_uint8_ravel * Flt_uint8_ravel) return - cc_ref_flt/torch.sqrt(cc_ref*cc_flt)", "batch_transform(flt_imgs, t_mats) cc_a = cross_correlation(ref_img, flt_warped[0].ravel(), cc_ref) cc_b = cross_correlation(ref_img, flt_warped[1].ravel(), cc_ref) return", "name, res_path): for i in range(len(OUT_STAK)): b=name[i].split('/') c=b.pop() d=c.split('.') cv2.imwrite(os.path.join(res_path, d[0][0:2]+str(int(d[0][2:5]))+'.png'), kornia.tensor_to_image(OUT_STAK[i].cpu().byte())) #Creare", "m20 moments[3] = torch.sum(img * l.reshape((img.shape[0], 1)) ) # m01 moments[4] = torch.sum(img", "python env') parser.add_argument(\"-pt\", \"--patient\", nargs='?', help='Number of the patient to analyze', default=1, type=int)", "times_df_path = os.path.join(curr_res,'Img_powll_%02d.csv' % (t_id)) df.to_csv(df_path, 
index=False) times_df.to_csv(times_df_path, index=False) save_data(final_img,PET,curr_res) def compute_wrapper(args, num_threads=1):", "xy = Ncount[0]*nbin+Ncount[1] # Compute the number of repetitions in xy and assign", "mats, eref) if(mi_a < mi_b): end=d best_mi = mi_a linear_par[i]=c else: start=c best_mi", "np.std(times)],columns=['Test'+str(patient_id)])#+str(config)accel_id.get_config())]) times_df = pd.DataFrame(times,columns=['Test'+str(patient_id)])#+str(config)accel_id.get_config())]) df_path = os.path.join(curr_res,'Time_powll_%02d.csv' % (t_id)) times_df_path = os.path.join(curr_res,'Img_powll_%02d.csv' %", "hist_dim])).to_dense()/Ref_uint8_ravel.numel() else: idx_joint = torch.stack((Ref_uint8_ravel, Flt_uint8_ravel)) j_h_init = my_squared_hist2d_t(idx_joint, hist_dim, 0, 255)/Ref_uint8_ravel.numel() j_h", "Ncount[i] = torch.searchsorted(edges, sample[i, :], right=True) # Using digitize, values that fall on", "= torch.sum(img * (l.reshape((img.shape[0], 1)))**2 ) # m02 moments[5] = torch.sum(img * l", "start_time) times.append(it_time) t=t+it_time df = pd.DataFrame([t, np.mean(times), np.std(times)],columns=['Test'+str(patient_id)])#+str(config)accel_id.get_config())]) times_df = pd.DataFrame(times,columns=['Test'+str(patient_id)])#+str(config)accel_id.get_config())]) df_path =", "roundness=(flt_mom[2]/flt_mom[0]) / (flt_mom[4]/flt_mom[0]) if torch.abs(roundness-1.0)>=0.3: params[0][0]= torch.cos(delta_rho) params[0][1] = -torch.sin(delta_rho) params[1][0] = torch.sin(delta_rho)", "= args.device compute_wrapper(args, num_threads) print(\"Faber Powell python is at the end :)\") if", "\"cpu\" ref_vals = None move_data = None def no_transfer(input_data): return input_data def to_cuda(input_data):", "images_per_thread * i end = images_per_thread * (i + 1) if i <", "kornia.tensor_to_image(OUT_STAK[i].cpu().byte())) #Creare cartelle def compute(CT, PET, name, curr_res, t_id, patient_id): final_img=[] times=[] t", "to the right edge to be # counted in the last bin, and", "any person obtaining a copy # *of this software and associated documentation files", "torch.tensor([80.0, 80.0, 1.0]) pa = torch.tensor([params_cpu[0][2],params_cpu[1][2],params_cpu[0][0]]) Ref_uint8_ravel = Ref_uint8.ravel().double() eref = precompute_metric(Ref_uint8_ravel) flt_u", "for i in range(D): Ncount[i] = torch.searchsorted(edges, sample[i, :], right=True) # Using digitize,", "cc_ref) return cc_a.cpu(), cc_b.cpu() def compute_mse(ref_img, flt_imgs, t_mats, mse_ref): flt_warped = batch_transform(flt_imgs, t_mats)", "params_trans=to_matrix_blocked(optimal_params) flt_transform = transform(Flt_uint8, move_data(params_trans)) return (flt_transform) def save_data(OUT_STAK, name, res_path): for i", "N = sample.shape edges = torch.linspace(smin, smax, bins + 1, device=device) nbin =", "mode=\"nearest\", dsize=(tmp_img.shape[2], tmp_img.shape[3])) return img_warped def compute_moments(img): moments = torch.empty(6, device=device) l =", "# *FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE", "print(images_per_thread) for i in range(num_threads): start = images_per_thread * i end = images_per_thread", "copyright notice and this permission notice shall be included in all # *copies", "save_data(final_img,PET,curr_res) def compute_wrapper(args, num_threads=1): config=args.config for k in range(args.offset, args.patient): pool = []", "*image.shape)).float() t_par = torch.unsqueeze(par, dim=0) img_warped = kornia.geometry.warp_affine(tmp_img, t_par, mode=\"nearest\", dsize=(tmp_img.shape[2], tmp_img.shape[3])) return", "# *MIT License # * # *Copyright (c) [2021] [<NAME>, <NAME>, <NAME>, <NAME>]", "times_df.to_csv(times_df_path, index=False) save_data(final_img,PET,curr_res) def compute_wrapper(args, num_threads=1): config=args.config for k in range(args.offset, args.patient): pool", "*in the Software without restriction, including without limitation the rights # *to use,", "patient to analyze', default=1, type=int) parser.add_argument(\"-o\", \"--offset\", nargs='?', help='Starting patient to analyze', default=0,", "mi_b = mutual_information(ref_img, flt_warped[1].ravel(), eref) return torch.exp(-mi_a).cpu(), torch.exp(-mi_b).cpu() def compute_cc(ref_img, flt_imgs, t_mats, cc_ref):", "to the left. Ncount[i][on_edge] -= 1 # Compute the sample indices in the", "NONINFRINGEMENT. IN NO EVENT SHALL THE # *AUTHORS OR COPYRIGHT HOLDERS BE LIABLE", "= ref_mom[3]/ref_mom[0] ref_mu_20 = (ref_mom[2]/ref_mom[0]*1.0)-(ref_avg_10*ref_avg_10) ref_mu_02 = (ref_mom[4]/ref_mom[0]*1.0)-(ref_avg_01*ref_avg_01) ref_mu_11 = (ref_mom[5]/ref_mom[0]*1.0)-(ref_avg_01*ref_avg_10) params[0][2] =", "if i < num_threads - 1 else len(CT) name = \"t%02d\" % (i)", "i, k))) for t in pool: t.start() for t in pool: t.join() hist_dim", "my_squared_hist2d_t(idx_joint, hist_dim, 0, 255)/Ref_uint8_ravel.numel() j_h = j_h_init[j_h_init>0.000000000000001] entropy=(torch.sum(j_h*(torch.log2(j_h))))*-1 hflt=torch.sum(j_h_init,axis=0) hflt=hflt[hflt>0.000000000000001] eflt=(torch.sum(hflt*(torch.log2(hflt))))*-1 mutualinfo=eref+eflt-entropy return(mutualinfo)", "= False eps = 0.000005 last_mut=100000.0 it=0 while(not converged): converged=True it=it+1 for i", "the bin number each sample falls into. Ncount = D*[None] for i in", "Compute the sample indices in the flattened histogram matrix. xy = Ncount[0]*nbin+Ncount[1] #", "= cross_correlation(ref_img, flt_warped[0].ravel(), cc_ref) cc_b = cross_correlation(ref_img, flt_warped[1].ravel(), cc_ref) return cc_a.cpu(), cc_b.cpu() def", "-1: mat_params[0][0]=1 #cos_teta mat_params[1][1]=1 #cos_teta mat_params[0][1]=0 mat_params[1][0]=0 else: mat_params[0][0]=vector_params[2] #cos_teta mat_params[1][1]=vector_params[2] #cos_teta mat_params[0][1]=torch.sqrt(1-(vector_params[2]**2))", "= (flt_mom[4]/flt_mom[0]*1.0)-(flt_avg_01*flt_avg_01) flt_mu_11 = (flt_mom[5]/flt_mom[0]*1.0)-(flt_avg_01*flt_avg_10) ref_avg_10 = ref_mom[1]/ref_mom[0] ref_avg_01 = ref_mom[3]/ref_mom[0] ref_mu_20 =", "the patient to analyze', default=1, type=int) parser.add_argument(\"-o\", \"--offset\", nargs='?', help='Starting patient to analyze',", "torch.sum(img * (l.reshape((img.shape[0], 1)))**2 ) # m02 moments[5] = torch.sum(img * l *", "sample falls into. 
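    # torch.searchsorted(edges, x, right=True) returns, for each value, how many
    # edges are <= that value. Illustrative numbers: with bins=4 over [0, 255] the
    # edges are [0, 63.75, 127.5, 191.25, 255], so a value of 10 maps to bin index 1
    # and a value of 255 maps to index 5, the overflow slot that the edge fix-up
    # below pulls back into the last real bin.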
Ncount = D*[None] for i in range(D): Ncount[i] = torch.searchsorted(edges,", "params[1][2] = ref_mom[3]/ref_mom[0] - flt_mom[3]/flt_mom[0] rho_flt=0.5*torch.atan((2.0*flt_mu_11)/(flt_mu_20-flt_mu_02)) rho_ref=0.5*torch.atan((2.0*ref_mu_11)/(ref_mu_20-ref_mu_02)) delta_rho=rho_ref-rho_flt roundness=(flt_mom[2]/flt_mom[0]) / (flt_mom[4]/flt_mom[0]) if torch.abs(roundness-1.0)>=0.3:", "Flt_img = torch.tensor(flt.pixel_array.astype(np.int16), dtype=torch.int16, device=device) Ref_img = (Ref_img - Ref_img.min())/(Ref_img.max() - Ref_img.min())*255 Ref_uint8", "converged): converged=True it=it+1 for i in range(par_lin.numel()): cur_par = par_lin[i] cur_rng = rng[i]", "merge, publish, distribute, sublicense, and/or sell # *copies of the Software, and to", "hist_dim, 0, 255)/Ref_uint8_ravel.numel() j_h = j_h_init[j_h_init>0.000000000000001] entropy=(torch.sum(j_h*(torch.log2(j_h))))*-1 hflt=torch.sum(j_h_init,axis=0) hflt=hflt[hflt>0.000000000000001] eflt=(torch.sum(hflt*(torch.log2(hflt))))*-1 mutualinfo=eref+eflt-entropy return(mutualinfo) def", "type=int) parser.add_argument(\"-c\", \"--config\", nargs='?', help='prefix Path of patients folder', default='./') parser.add_argument(\"-mtr\", \"--metric\", nargs='?',", "def compute_mi(ref_img, flt_imgs, t_mats, eref): flt_warped = batch_transform(flt_imgs, t_mats) #flt_img = transform(flt_img, t_mat)", "* (l**2)) # m20 moments[3] = torch.sum(img * l.reshape((img.shape[0], 1)) ) # m01", "Results', default='./') parser.add_argument(\"-t\", \"--thread_number\", nargs='?', help='Number of // threads', default=1, type=int) parser.add_argument(\"-px\", \"--prefix\",", "ANY CLAIM, DAMAGES OR OTHER # *LIABILITY, WHETHER IN AN ACTION OF CONTRACT,", "to_matrix_blocked(vector_params): mat_params=torch.empty((2,3)) mat_params[0][2]=vector_params[0] mat_params[1][2]=vector_params[1] if vector_params[2] > 1 or vector_params[2] < -1: mat_params[0][0]=1", "set_start_method import struct import statistics import argparse import kornia import torch compute_metric =", "256 dim = 512 def main(): parser = argparse.ArgumentParser(description='Iron software for IR onto", "PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # *IMPLIED, INCLUDING", "this permission notice shall be included in all # *copies or substantial portions", "= torch.stack((flt_u, flt_u)) optimal_params = optimize_powell(rng, pa, Ref_uint8_ravel, flt_stack, eref) params_trans=to_matrix_blocked(optimal_params) flt_transform =", "compute_mse(ref_img, flt_imgs, t_mats, mse_ref): flt_warped = batch_transform(flt_imgs, t_mats) mse_a = mean_squared_error(ref_img, flt_warped[0].ravel(), mse_ref)", "def optimize_goldsearch(par, rng, ref_sup_ravel, flt_stack, linear_par, i, eref): start=par-0.382*rng end=par+0.618*rng c=(end-(end-start)/1.618) d=(start+(end-start)/1.618) best_mi", "pd from torch.multiprocessing import Pool, Process, set_start_method import struct import statistics import argparse", "for i in range(len(OUT_STAK)): b=name[i].split('/') c=b.pop() d=c.split('.') cv2.imwrite(os.path.join(res_path, d[0][0:2]+str(int(d[0][2:5]))+'.png'), kornia.tensor_to_image(OUT_STAK[i].cpu().byte())) #Creare cartelle def", "files (the \"Software\"), to deal # *in the Software without restriction, including without", "device=device) moments[0] = torch.sum(img) # m00 moments[1] = torch.sum(img * l) # m10", "params[1][1] = 1.0 return (params) def my_squared_hist2d_t(sample, bins, smin, smax): D, N =", "in xy and assign it to the hist = torch.bincount(xy, None, minlength=nbin*nbin) #", "i end = 
images_per_thread * (i + 1) if i < num_threads -", "*Copyright (c) [2021] [<NAME>, <NAME>, <NAME>, <NAME>] # * # *Permission is hereby", "or substantial portions of the Software. # * # *THE SOFTWARE IS PROVIDED", "pool: t.join() hist_dim = 256 dim = 512 def main(): parser = argparse.ArgumentParser(description='Iron", "for t in pool: t.join() hist_dim = 256 dim = 512 def main():", "torch.exp(-mi_a).cpu(), torch.exp(-mi_b).cpu() def compute_cc(ref_img, flt_imgs, t_mats, cc_ref): flt_warped = batch_transform(flt_imgs, t_mats) cc_a =", "= flt_mom[1]/flt_mom[0] flt_avg_01 = flt_mom[3]/flt_mom[0] flt_mu_20 = (flt_mom[2]/flt_mom[0]*1.0)-(flt_avg_10*flt_avg_10) flt_mu_02 = (flt_mom[4]/flt_mom[0]*1.0)-(flt_avg_01*flt_avg_01) flt_mu_11 =", "values equal to the right edge to be # counted in the last", "= torch.sin(delta_rho) params[1][1] = torch.cos(delta_rho) else: params[0][0]= 1.0 params[0][1] = 0.0 params[1][0] =", "# *copies of the Software, and to permit persons to whom the Software", "MERCHANTABILITY, # *FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL", "analyze', default=0, type=int) parser.add_argument(\"-cp\", \"--ct_path\", nargs='?', help='Path of the CT Images', default='./') parser.add_argument(\"-pp\",", "help='prefix Path of patients folder', default='./') parser.add_argument(\"-im\", \"--image_dimension\", nargs='?', help='Target images dimensions', default=512,", "the Results', default='./') parser.add_argument(\"-t\", \"--thread_number\", nargs='?', help='Number of // threads', default=1, type=int) parser.add_argument(\"-px\",", "= optimize_powell(rng, pa, Ref_uint8_ravel, flt_stack, eref) params_trans=to_matrix_blocked(optimal_params) flt_transform = transform(Flt_uint8, move_data(params_trans)) return (flt_transform)", "t = 0.0 it_time = 0.0 hist_dim = 256 dim = 512 global", "ref_avg_01 = ref_mom[3]/ref_mom[0] ref_mu_20 = (ref_mom[2]/ref_mom[0]*1.0)-(ref_avg_10*ref_avg_10) ref_mu_02 = (ref_mom[4]/ref_mom[0]*1.0)-(ref_avg_01*ref_avg_01) ref_mu_11 = (ref_mom[5]/ref_mom[0]*1.0)-(ref_avg_01*ref_avg_10) params[0][2]", "\"CC\": compute_metric = compute_cc precompute_metric = precompute_cross_correlation elif args.metric == \"MSE\": compute_metric =", "global ref_vals ref_vals = torch.ones(dim*dim, dtype=torch.int, device=device) global move_data move_data = no_transfer if", "torch.stack((flt_u, flt_u)) optimal_params = optimize_powell(rng, pa, Ref_uint8_ravel, flt_stack, eref) params_trans=to_matrix_blocked(optimal_params) flt_transform = transform(Flt_uint8,", "rng[i] param_opt, cur_mi = optimize_goldsearch(cur_par, cur_rng, ref_sup_ravel, flt_stack, par_lin, i, eref) par_lin[i]=cur_par if", "eref) return torch.exp(-mi_a).cpu(), torch.exp(-mi_b).cpu() def compute_cc(ref_img, flt_imgs, t_mats, cc_ref): flt_warped = batch_transform(flt_imgs, t_mats)", "/****************************************** # *MIT License # * # *Copyright (c) [2021] [<NAME>, <NAME>, <NAME>,", "default='cpu') args = parser.parse_args() num_threads=args.thread_number patient_number=args.patient print(args.config) print(args) global compute_metric, precompute_metric if args.metric", "the flattened histogram matrix. 
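    # Row-major flattening: a pair binned to (row, col) becomes row*nbin + col, so a
    # single bincount over the flat indices followed by reshape((nbin, nbin)) rebuilds
    # the 2-D joint histogram (illustrative numbers: with nbin = 6, row 2 and col 3
    # land at flat index 15).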
xy = Ncount[0]*nbin+Ncount[1] # Compute the number of repetitions", "= pydicom.dcmread(j) Flt_img = torch.tensor(flt.pixel_array.astype(np.int16), dtype=torch.int16, device=device) Ref_img = (Ref_img - Ref_img.min())/(Ref_img.max() -", "Using digitize, values that fall on an edge are put in the right", "above copyright notice and this permission notice shall be included in all #", "j_h_init = my_squared_hist2d_t(idx_joint, hist_dim, 0, 255)/Ref_uint8_ravel.numel() j_h = j_h_init[j_h_init>0.000000000000001] entropy=(torch.sum(j_h*(torch.log2(j_h))))*-1 hflt=torch.sum(j_h_init,axis=0) hflt=hflt[hflt>0.000000000000001] eflt=(torch.sum(hflt*(torch.log2(hflt))))*-1", "def to_cuda(input_data): return input_data.cuda(non_blocking=True) def batch_transform(images, pars): img_warped = kornia.geometry.warp_affine(images, pars, mode=\"nearest\", dsize=(images.shape[2],", "\"--res_path\", nargs='?', help='Path of the Results', default='./') parser.add_argument(\"-t\", \"--thread_number\", nargs='?', help='Number of //", "bins, smin, smax): D, N = sample.shape edges = torch.linspace(smin, smax, bins +", "Flt_uint8_ravel) return - cc_ref_flt/torch.sqrt(cc_ref*cc_flt) def precompute_mean_squared_error(Ref_uint8_ravel): pass def mean_squared_error(Ref_uint8_ravel, Flt_uint8_ravel, mse_ref): return torch.sum((Ref_uint8_ravel", "rho_flt=0.5*torch.atan((2.0*flt_mu_11)/(flt_mu_20-flt_mu_02)) rho_ref=0.5*torch.atan((2.0*ref_mu_11)/(ref_mu_20-ref_mu_02)) delta_rho=rho_ref-rho_flt roundness=(flt_mom[2]/flt_mom[0]) / (flt_mom[4]/flt_mom[0]) if torch.abs(roundness-1.0)>=0.3: params[0][0]= torch.cos(delta_rho) params[0][1] = -torch.sin(delta_rho)", "smax, bins + 1, device=device) nbin = edges.shape[0] + 1 # Compute the", "cc_a = cross_correlation(ref_img, flt_warped[0].ravel(), cc_ref) cc_b = cross_correlation(ref_img, flt_warped[1].ravel(), cc_ref) return cc_a.cpu(), cc_b.cpu()", "= mi_a linear_par[i]=c else: start=c best_mi = mi_b linear_par[i]=d c=(end-(end-start)/1.618) d=(start+(end-start)/1.618) return (end+start)/2,", "def my_squared_hist2d_t(sample, bins, smin, smax): D, N = sample.shape edges = torch.linspace(smin, smax,", "while(math.fabs(c-d)>0.005): linear_par[i]=c a_mat=to_matrix_blocked(linear_par) linear_par[i]=d b_mat=to_matrix_blocked(linear_par) mats = move_data(torch.stack((a_mat, b_mat))) mi_a, mi_b = compute_metric(ref_sup_ravel,", "to any person obtaining a copy # *of this software and associated documentation", "m01 moments[4] = torch.sum(img * (l.reshape((img.shape[0], 1)))**2 ) # m02 moments[5] = torch.sum(img", "optimize_goldsearch(par, rng, ref_sup_ravel, flt_stack, linear_par, i, eref): start=par-0.382*rng end=par+0.618*rng c=(end-(end-start)/1.618) d=(start+(end-start)/1.618) best_mi =", "math import glob import time import pandas as pd from torch.multiprocessing import Pool,", "< -1: mat_params[0][0]=1 #cos_teta mat_params[1][1]=1 #cos_teta mat_params[0][1]=0 mat_params[1][0]=0 else: mat_params[0][0]=vector_params[2] #cos_teta mat_params[1][1]=vector_params[2] #cos_teta", "numpy as np import math import glob import time import pandas as pd", "curr_prefix = args.prefix+str(k) curr_ct = os.path.join(curr_prefix,args.ct_path) curr_pet = os.path.join(curr_prefix,args.pet_path) curr_res = os.path.join(\"\",args.res_path) os.makedirs(curr_res,exist_ok=True)", "= precompute_metric(Ref_uint8_ravel) flt_u = torch.unsqueeze(Flt_uint8, dim=0).float() flt_stack = torch.stack((flt_u, flt_u)) optimal_params = optimize_powell(rng,", "k in range(args.offset, args.patient): pool = [] curr_prefix = 
args.prefix+str(k) curr_ct = os.path.join(curr_prefix,args.ct_path)", "type=int) parser.add_argument(\"-px\", \"--prefix\", nargs='?', help='prefix Path of patients folder', default='./') parser.add_argument(\"-im\", \"--image_dimension\", nargs='?',", "analyze', default=1, type=int) parser.add_argument(\"-o\", \"--offset\", nargs='?', help='Starting patient to analyze', default=0, type=int) parser.add_argument(\"-cp\",", "of the Software, and to permit persons to whom the Software is #", "+ 1) if i < num_threads - 1 else len(CT) name = \"t%02d\"", "par_lin[i]=param_opt last_mut=cur_mi converged=False else: par_lin[i]=cur_par #print(\"Iterations \"+str(it)) return (par_lin) def register_images(Ref_uint8, Flt_uint8): params", "b_mat=to_matrix_blocked(linear_par) mats = move_data(torch.stack((a_mat, b_mat))) mi_a, mi_b = compute_metric(ref_sup_ravel, flt_stack, mats, eref) if(mi_a", "= rng[i] param_opt, cur_mi = optimize_goldsearch(cur_par, cur_rng, ref_sup_ravel, flt_stack, par_lin, i, eref) par_lin[i]=cur_par", "import time import pandas as pd from torch.multiprocessing import Pool, Process, set_start_method import", "f_img = register_images(Ref_uint8, Flt_uint8) end_time= time.time() final_img.append(f_img.cpu()) it_time = (end_time - start_time) times.append(it_time)", "one bin to the left. Ncount[i][on_edge] -= 1 # Compute the sample indices", "the number of repetitions in xy and assign it to the hist =", "= (Flt_img - Flt_img.min())/(Flt_img.max() - Flt_img.min())*255 Flt_uint8 = Flt_img.round().type(torch.uint8) start_time = time.time() f_img", "default='MI') parser.add_argument(\"-dvc\", \"--device\", nargs='?', help='Target device', choices=['cpu', 'cuda'], default='cpu') args = parser.parse_args() num_threads=args.thread_number", "pool: t.start() for t in pool: t.join() hist_dim = 256 dim = 512", "None move_data = None def no_transfer(input_data): return input_data def to_cuda(input_data): return input_data.cuda(non_blocking=True) def", "rightmost bin, we want values equal to the right edge to be #", "parser.add_argument(\"-dvc\", \"--device\", nargs='?', help='Target device', choices=['cpu', 'cuda'], default='cpu') args = parser.parse_args() num_threads=args.thread_number patient_number=args.patient", "ref_vals = None move_data = None def no_transfer(input_data): return input_data def to_cuda(input_data): return", "= ref_mom[1]/ref_mom[0]-flt_mom[1]/flt_mom[0] params[1][2] = ref_mom[3]/ref_mom[0] - flt_mom[3]/flt_mom[0] rho_flt=0.5*torch.atan((2.0*flt_mu_11)/(flt_mu_20-flt_mu_02)) rho_ref=0.5*torch.atan((2.0*ref_mu_11)/(ref_mu_20-ref_mu_02)) delta_rho=rho_ref-rho_flt roundness=(flt_mom[2]/flt_mom[0]) / (flt_mom[4]/flt_mom[0])", "best_mi def optimize_powell(rng, par_lin, ref_sup_ravel, flt_stack, eref): converged = False eps = 0.000005", "param_opt, cur_mi = optimize_goldsearch(cur_par, cur_rng, ref_sup_ravel, flt_stack, par_lin, i, eref) par_lin[i]=cur_par if last_mut-cur_mi>eps:", "df.to_csv(df_path, index=False) times_df.to_csv(times_df_path, index=False) save_data(final_img,PET,curr_res) def compute_wrapper(args, num_threads=1): config=args.config for k in range(args.offset,", "THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # *SOFTWARE. 
#", "- start_time) times.append(it_time) t=t+it_time df = pd.DataFrame([t, np.mean(times), np.std(times)],columns=['Test'+str(patient_id)])#+str(config)accel_id.get_config())]) times_df = pd.DataFrame(times,columns=['Test'+str(patient_id)])#+str(config)accel_id.get_config())]) df_path", "EVENT SHALL THE # *AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,", "compute_metric = compute_cc precompute_metric = precompute_cross_correlation elif args.metric == \"MSE\": compute_metric = compute_mse", "the Software is # *furnished to do so, subject to the following conditions:", "k))) for t in pool: t.start() for t in pool: t.join() hist_dim =", "c=b.pop() d=c.split('.') cv2.imwrite(os.path.join(res_path, d[0][0:2]+str(int(d[0][2:5]))+'.png'), kornia.tensor_to_image(OUT_STAK[i].cpu().byte())) #Creare cartelle def compute(CT, PET, name, curr_res, t_id,", "start_time = time.time() f_img = register_images(Ref_uint8, Flt_uint8) end_time= time.time() final_img.append(f_img.cpu()) it_time = (end_time", "right edge to be # counted in the last bin, and not as", "params[1][0] = torch.sin(delta_rho) params[1][1] = torch.cos(delta_rho) else: params[0][0]= 1.0 params[0][1] = 0.0 params[1][0]", "OR OTHER DEALINGS IN THE # *SOFTWARE. # ******************************************/ import os import pydicom", "Path of patients folder', default='./') parser.add_argument(\"-im\", \"--image_dimension\", nargs='?', help='Target images dimensions', default=512, type=int)", "> 1 or vector_params[2] < -1: mat_params[0][0]=1 #cos_teta mat_params[1][1]=1 #cos_teta mat_params[0][1]=0 mat_params[1][0]=0 else:", "0.000005 last_mut=100000.0 it=0 while(not converged): converged=True it=it+1 for i in range(par_lin.numel()): cur_par =", "OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS", "= os.path.join(curr_res,'Img_powll_%02d.csv' % (t_id)) df.to_csv(df_path, index=False) times_df.to_csv(times_df_path, index=False) save_data(final_img,PET,curr_res) def compute_wrapper(args, num_threads=1): config=args.config", "of the patient to analyze', default=1, type=int) parser.add_argument(\"-o\", \"--offset\", nargs='?', help='Starting patient to", "# Compute the bin number each sample falls into. Ncount = D*[None] for", "os.path.join(curr_res,'Time_powll_%02d.csv' % (t_id)) times_df_path = os.path.join(curr_res,'Img_powll_%02d.csv' % (t_id)) df.to_csv(df_path, index=False) times_df.to_csv(times_df_path, index=False) save_data(final_img,PET,curr_res)", "*THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR", "Images', default='./') parser.add_argument(\"-pp\", \"--pet_path\", nargs='?', help='Path of the PET Images', default='./') parser.add_argument(\"-rp\", \"--res_path\",", "on an edge are put in the right bin. 
# For the rightmost", "== \"cuda\"): idx_joint = torch.stack((Ref_uint8_ravel, Flt_uint8_ravel)).long() j_h_init = torch.sparse.IntTensor(idx_joint, ref_vals, torch.Size([hist_dim, hist_dim])).to_dense()/Ref_uint8_ravel.numel() else:", "and associated documentation files (the \"Software\"), to deal # *in the Software without", "and to permit persons to whom the Software is # *furnished to do", "dtype=torch.int, device=device) global move_data move_data = no_transfer if device==\"cpu\" else to_cuda for c,ij", "d=(start+(end-start)/1.618) return (end+start)/2, best_mi def optimize_powell(rng, par_lin, ref_sup_ravel, flt_stack, eref): converged = False", "if(device == \"cuda\"): idx_joint = torch.stack((Ref_uint8_ravel, Flt_uint8_ravel)).long() j_h_init = torch.sparse.IntTensor(idx_joint, ref_vals, torch.Size([hist_dim, hist_dim])).to_dense()/Ref_uint8_ravel.numel()", "whom the Software is # *furnished to do so, subject to the following", "edges.shape[0] + 1 # Compute the bin number each sample falls into. Ncount", "\"Software\"), to deal # *in the Software without restriction, including without limitation the", "= (flt_mom[5]/flt_mom[0]*1.0)-(flt_avg_01*flt_avg_10) ref_avg_10 = ref_mom[1]/ref_mom[0] ref_avg_01 = ref_mom[3]/ref_mom[0] ref_mu_20 = (ref_mom[2]/ref_mom[0]*1.0)-(ref_avg_10*ref_avg_10) ref_mu_02 =", "portions of the Software. # * # *THE SOFTWARE IS PROVIDED \"AS IS\",", "import math import glob import time import pandas as pd from torch.multiprocessing import", "it=0 while(not converged): converged=True it=it+1 for i in range(par_lin.numel()): cur_par = par_lin[i] cur_rng", "(mat_params) def estimate_initial(Ref_uint8, Flt_uint8, params): ref_mom = compute_moments(Ref_uint8) flt_mom = compute_moments(Flt_uint8) flt_avg_10 =", "a proper matrix hist = hist.reshape((nbin, nbin)) hist = hist.float() # Remove outliers", "last bin, and not as an outlier. for i in range(D): # Find", "Flt_uint8_ravel, eref): if(device == \"cuda\"): idx_joint = torch.stack((Ref_uint8_ravel, Flt_uint8_ravel)).long() j_h_init = torch.sparse.IntTensor(idx_joint, ref_vals,", "SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #", "ref_mu_02 = (ref_mom[4]/ref_mom[0]*1.0)-(ref_avg_01*ref_avg_01) ref_mu_11 = (ref_mom[5]/ref_mom[0]*1.0)-(ref_avg_01*ref_avg_10) params[0][2] = ref_mom[1]/ref_mom[0]-flt_mom[1]/flt_mom[0] params[1][2] = ref_mom[3]/ref_mom[0] -", "precompute_mutual_information elif args.metric == \"CC\": compute_metric = compute_cc precompute_metric = precompute_cross_correlation elif args.metric", "time import pandas as pd from torch.multiprocessing import Pool, Process, set_start_method import struct", "is hereby granted, free of charge, to any person obtaining a copy #", "# counted in the last bin, and not as an outlier. 
for i", "(flt_mom[5]/flt_mom[0]*1.0)-(flt_avg_01*flt_avg_10) ref_avg_10 = ref_mom[1]/ref_mom[0] ref_avg_01 = ref_mom[3]/ref_mom[0] ref_mu_20 = (ref_mom[2]/ref_mom[0]*1.0)-(ref_avg_10*ref_avg_10) ref_mu_02 = (ref_mom[4]/ref_mom[0]*1.0)-(ref_avg_01*ref_avg_01)", "num_threads print(images_per_thread) for i in range(num_threads): start = images_per_thread * i end =", "eref=(torch.sum(href*(torch.log2(href))))*-1 return eref def mutual_information(Ref_uint8_ravel, Flt_uint8_ravel, eref): if(device == \"cuda\"): idx_joint = torch.stack((Ref_uint8_ravel,", "eref) if(mi_a < mi_b): end=d best_mi = mi_a linear_par[i]=c else: start=c best_mi =", "elif args.metric == \"MSE\": compute_metric = compute_mse precompute_metric = precompute_mean_squared_error else: print(\"Unsupported metric!\")", "torch.Size([hist_dim, hist_dim])).to_dense()/Ref_uint8_ravel.numel() else: idx_joint = torch.stack((Ref_uint8_ravel, Flt_uint8_ravel)) j_h_init = my_squared_hist2d_t(idx_joint, hist_dim, 0, 255)/Ref_uint8_ravel.numel()", "compute_mi(ref_img, flt_imgs, t_mats, eref): flt_warped = batch_transform(flt_imgs, t_mats) #flt_img = transform(flt_img, t_mat) mi_a", "None device = \"cpu\" ref_vals = None move_data = None def no_transfer(input_data): return", "eref def mutual_information(Ref_uint8_ravel, Flt_uint8_ravel, eref): if(device == \"cuda\"): idx_joint = torch.stack((Ref_uint8_ravel, Flt_uint8_ravel)).long() j_h_init", "choices=['cpu', 'cuda'], default='cpu') args = parser.parse_args() num_threads=args.thread_number patient_number=args.patient print(args.config) print(args) global compute_metric, precompute_metric", "# *OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR", "precompute_metric = precompute_cross_correlation elif args.metric == \"MSE\": compute_metric = compute_mse precompute_metric = precompute_mean_squared_error", "mse_ref) return mse_a.cpu(), mse_b.cpu() def optimize_goldsearch(par, rng, ref_sup_ravel, flt_stack, linear_par, i, eref): start=par-0.382*rng", "dim=0) img_warped = kornia.geometry.warp_affine(tmp_img, t_par, mode=\"nearest\", dsize=(tmp_img.shape[2], tmp_img.shape[3])) return img_warped def compute_moments(img): moments", "of the PET Images', default='./') parser.add_argument(\"-rp\", \"--res_path\", nargs='?', help='Path of the Results', default='./')", "help='Number of the patient to analyze', default=1, type=int) parser.add_argument(\"-o\", \"--offset\", nargs='?', help='Starting patient", "no_transfer(input_data): return input_data def to_cuda(input_data): return input_data.cuda(non_blocking=True) def batch_transform(images, pars): img_warped = kornia.geometry.warp_affine(images,", "values that fall on an edge are put in the right bin. 
#", "len(CT) name = \"t%02d\" % (i) pool.append(Process(target=compute, args=(CT[start:end], PET[start:end], name, curr_res, i, k)))", ":], right=True) # Using digitize, values that fall on an edge are put", "0.0 params[1][0] = 0.0 params[1][1] = 1.0 return (params) def my_squared_hist2d_t(sample, bins, smin,", "- Ref_img.min())*255 Ref_uint8 = Ref_img.round().type(torch.uint8) Flt_img = (Flt_img - Flt_img.min())/(Flt_img.max() - Flt_img.min())*255 Flt_uint8", "pass def mean_squared_error(Ref_uint8_ravel, Flt_uint8_ravel, mse_ref): return torch.sum((Ref_uint8_ravel - Flt_uint8_ravel)**2) def compute_mi(ref_img, flt_imgs, t_mats,", "while(not converged): converged=True it=it+1 for i in range(par_lin.numel()): cur_par = par_lin[i] cur_rng =", "a python env') parser.add_argument(\"-pt\", \"--patient\", nargs='?', help='Number of the patient to analyze', default=1,", "PET)): i = ij[0] j = ij[1] ref = pydicom.dcmread(i) Ref_img = torch.tensor(ref.pixel_array.astype(np.int16),", "* # *Permission is hereby granted, free of charge, to any person obtaining", "= os.path.join(curr_prefix,args.pet_path) curr_res = os.path.join(\"\",args.res_path) os.makedirs(curr_res,exist_ok=True) CT=glob.glob(curr_ct+'/*dcm') PET=glob.glob(curr_pet+'/*dcm') PET.sort() CT.sort() assert len(CT) ==", "if last_mut-cur_mi>eps: par_lin[i]=param_opt last_mut=cur_mi converged=False else: par_lin[i]=cur_par #print(\"Iterations \"+str(it)) return (par_lin) def register_images(Ref_uint8,", "/ (flt_mom[4]/flt_mom[0]) if torch.abs(roundness-1.0)>=0.3: params[0][0]= torch.cos(delta_rho) params[0][1] = -torch.sin(delta_rho) params[1][0] = torch.sin(delta_rho) params[1][1]", "batch_transform(flt_imgs, t_mats) #flt_img = transform(flt_img, t_mat) mi_a = mutual_information(ref_img, flt_warped[0].ravel(), eref) mi_b =", "times_df = pd.DataFrame(times,columns=['Test'+str(patient_id)])#+str(config)accel_id.get_config())]) df_path = os.path.join(curr_res,'Time_powll_%02d.csv' % (t_id)) times_df_path = os.path.join(curr_res,'Img_powll_%02d.csv' % (t_id))", "# m10 moments[2] = torch.sum(img * (l**2)) # m20 moments[3] = torch.sum(img *", "OR OTHER # *LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,", "t_id, patient_id): final_img=[] times=[] t = 0.0 it_time = 0.0 hist_dim = 256", "pydicom.dcmread(j) Flt_img = torch.tensor(flt.pixel_array.astype(np.int16), dtype=torch.int16, device=device) Ref_img = (Ref_img - Ref_img.min())/(Ref_img.max() - Ref_img.min())*255", "*AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #", "IR onto a python env') parser.add_argument(\"-pt\", \"--patient\", nargs='?', help='Number of the patient to", "= torch.sum(Flt_uint8_ravel * Flt_uint8_ravel) return - cc_ref_flt/torch.sqrt(cc_ref*cc_flt) def precompute_mean_squared_error(Ref_uint8_ravel): pass def mean_squared_error(Ref_uint8_ravel, Flt_uint8_ravel,", "converged = False eps = 0.000005 last_mut=100000.0 it=0 while(not converged): converged=True it=it+1 for", "(flt_transform) def save_data(OUT_STAK, name, res_path): for i in range(len(OUT_STAK)): b=name[i].split('/') c=b.pop() d=c.split('.') cv2.imwrite(os.path.join(res_path,", "eref): if(device == \"cuda\"): idx_joint = torch.stack((Ref_uint8_ravel, Flt_uint8_ravel)).long() j_h_init = torch.sparse.IntTensor(idx_joint, ref_vals, torch.Size([hist_dim,", "parser.add_argument(\"-pp\", \"--pet_path\", nargs='?', help='Path of the PET Images', default='./') parser.add_argument(\"-rp\", \"--res_path\", nargs='?', help='Path", "elif args.metric == \"CC\": compute_metric = compute_cc 
precompute_metric = precompute_cross_correlation elif args.metric ==", "help='Metric accelerator to be tested', choices=['MI', 'CC', 'MSE'], default='MI') parser.add_argument(\"-dvc\", \"--device\", nargs='?', help='Target", "= -torch.sin(delta_rho) params[1][0] = torch.sin(delta_rho) params[1][1] = torch.cos(delta_rho) else: params[0][0]= 1.0 params[0][1] =", "linear_par[i]=d b_mat=to_matrix_blocked(linear_par) mats = move_data(torch.stack((a_mat, b_mat))) mi_a, mi_b = compute_metric(ref_sup_ravel, flt_stack, mats, eref)", "hist_dim = 256 dim = 512 global ref_vals ref_vals = torch.ones(dim*dim, dtype=torch.int, device=device)", "= images_per_thread * (i + 1) if i < num_threads - 1 else", "compute(CT, PET, name, curr_res, t_id, patient_id): final_img=[] times=[] t = 0.0 it_time =", "= params.cpu() rng = torch.tensor([80.0, 80.0, 1.0]) pa = torch.tensor([params_cpu[0][2],params_cpu[1][2],params_cpu[0][0]]) Ref_uint8_ravel = Ref_uint8.ravel().double()", "\"--device\", nargs='?', help='Target device', choices=['cpu', 'cuda'], default='cpu') args = parser.parse_args() num_threads=args.thread_number patient_number=args.patient print(args.config)", "for each dimension). hist = hist[1:-1,1:-1] return hist def precompute_mutual_information(Ref_uint8_ravel): href = torch.histc(Ref_uint8_ravel,", "cc_ref) cc_b = cross_correlation(ref_img, flt_warped[1].ravel(), cc_ref) return cc_a.cpu(), cc_b.cpu() def compute_mse(ref_img, flt_imgs, t_mats,", "D, N = sample.shape edges = torch.linspace(smin, smax, bins + 1, device=device) nbin", "= batch_transform(flt_imgs, t_mats) mse_a = mean_squared_error(ref_img, flt_warped[0].ravel(), mse_ref) mse_b = mean_squared_error(ref_img, flt_warped[1].ravel(), mse_ref)", "torch.cos(delta_rho) params[0][1] = -torch.sin(delta_rho) params[1][0] = torch.sin(delta_rho) params[1][1] = torch.cos(delta_rho) else: params[0][0]= 1.0", "for i in range(num_threads): start = images_per_thread * i end = images_per_thread *", "moments[2] = torch.sum(img * (l**2)) # m20 moments[3] = torch.sum(img * l.reshape((img.shape[0], 1))", "OR THE USE OR OTHER DEALINGS IN THE # *SOFTWARE. # ******************************************/ import", "CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE #", "= my_squared_hist2d_t(idx_joint, hist_dim, 0, 255)/Ref_uint8_ravel.numel() j_h = j_h_init[j_h_init>0.000000000000001] entropy=(torch.sum(j_h*(torch.log2(j_h))))*-1 hflt=torch.sum(j_h_init,axis=0) hflt=hflt[hflt>0.000000000000001] eflt=(torch.sum(hflt*(torch.log2(hflt))))*-1 mutualinfo=eref+eflt-entropy", "= 512 def main(): parser = argparse.ArgumentParser(description='Iron software for IR onto a python", "cc_ref): cc_ref_flt = torch.sum(Ref_uint8_ravel * Flt_uint8_ravel) cc_flt = torch.sum(Flt_uint8_ravel * Flt_uint8_ravel) return -", "# *to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # *copies", "shall be included in all # *copies or substantial portions of the Software.", "return cc_a.cpu(), cc_b.cpu() def compute_mse(ref_img, flt_imgs, t_mats, mse_ref): flt_warped = batch_transform(flt_imgs, t_mats) mse_a", "-= 1 # Compute the sample indices in the flattened histogram matrix. 
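    # This binning scheme appears to follow numpy.histogramdd (same edge handling
    # and the same under/overflow trimming), re-expressed with torch ops so the
    # joint histogram is built directly on the selected device.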
xy", "smin, smax): D, N = sample.shape edges = torch.linspace(smin, smax, bins + 1,", "torch.arange(img.shape[0], device=device) moments[0] = torch.sum(img) # m00 moments[1] = torch.sum(img * l) #", "img_warped = kornia.geometry.warp_affine(tmp_img, t_par, mode=\"nearest\", dsize=(tmp_img.shape[2], tmp_img.shape[3])) return img_warped def compute_moments(img): moments =", "d[0][0:2]+str(int(d[0][2:5]))+'.png'), kornia.tensor_to_image(OUT_STAK[i].cpu().byte())) #Creare cartelle def compute(CT, PET, name, curr_res, t_id, patient_id): final_img=[] times=[]", "move_data(torch.stack((a_mat, b_mat))) mi_a, mi_b = compute_metric(ref_sup_ravel, flt_stack, mats, eref) if(mi_a < mi_b): end=d", "'MSE'], default='MI') parser.add_argument(\"-dvc\", \"--device\", nargs='?', help='Target device', choices=['cpu', 'cuda'], default='cpu') args = parser.parse_args()", "cross_correlation(ref_img, flt_warped[1].ravel(), cc_ref) return cc_a.cpu(), cc_b.cpu() def compute_mse(ref_img, flt_imgs, t_mats, mse_ref): flt_warped =", "t_mats) cc_a = cross_correlation(ref_img, flt_warped[0].ravel(), cc_ref) cc_b = cross_correlation(ref_img, flt_warped[1].ravel(), cc_ref) return cc_a.cpu(),", "ref_mom[1]/ref_mom[0] ref_avg_01 = ref_mom[3]/ref_mom[0] ref_mu_20 = (ref_mom[2]/ref_mom[0]*1.0)-(ref_avg_10*ref_avg_10) ref_mu_02 = (ref_mom[4]/ref_mom[0]*1.0)-(ref_avg_01*ref_avg_01) ref_mu_11 = (ref_mom[5]/ref_mom[0]*1.0)-(ref_avg_01*ref_avg_10)", "the left. Ncount[i][on_edge] -= 1 # Compute the sample indices in the flattened", "# For the rightmost bin, we want values equal to the right edge", "default=1, type=int) parser.add_argument(\"-o\", \"--offset\", nargs='?', help='Starting patient to analyze', default=0, type=int) parser.add_argument(\"-cp\", \"--ct_path\",", "= compute_cc precompute_metric = precompute_cross_correlation elif args.metric == \"MSE\": compute_metric = compute_mse precompute_metric", "return input_data.cuda(non_blocking=True) def batch_transform(images, pars): img_warped = kornia.geometry.warp_affine(images, pars, mode=\"nearest\", dsize=(images.shape[2], images.shape[3])) return", "on_edge = (sample[i, :] == edges[-1]) # Shift these points one bin to", "optimize_powell(rng, pa, Ref_uint8_ravel, flt_stack, eref) params_trans=to_matrix_blocked(optimal_params) flt_transform = transform(Flt_uint8, move_data(params_trans)) return (flt_transform) def", "pool.append(Process(target=compute, args=(CT[start:end], PET[start:end], name, curr_res, i, k))) for t in pool: t.start() for", "patients folder', default='./') parser.add_argument(\"-mtr\", \"--metric\", nargs='?', help='Metric accelerator to be tested', choices=['MI', 'CC',", "(c) [2021] [<NAME>, <NAME>, <NAME>, <NAME>] # * # *Permission is hereby granted,", "+ 1 # Compute the bin number each sample falls into. 
Ncount =", "env') parser.add_argument(\"-pt\", \"--patient\", nargs='?', help='Number of the patient to analyze', default=1, type=int) parser.add_argument(\"-o\",", "*of this software and associated documentation files (the \"Software\"), to deal # *in", ") # m01 moments[4] = torch.sum(img * (l.reshape((img.shape[0], 1)))**2 ) # m02 moments[5]", "\"--thread_number\", nargs='?', help='Number of // threads', default=1, type=int) parser.add_argument(\"-px\", \"--prefix\", nargs='?', help='prefix Path", "precompute_metric(Ref_uint8_ravel) flt_u = torch.unsqueeze(Flt_uint8, dim=0).float() flt_stack = torch.stack((flt_u, flt_u)) optimal_params = optimize_powell(rng, pa,", "// num_threads print(images_per_thread) for i in range(num_threads): start = images_per_thread * i end", "else to_cuda for c,ij in enumerate(zip(CT, PET)): i = ij[0] j = ij[1]", "we want values equal to the right edge to be # counted in", "rng, ref_sup_ravel, flt_stack, linear_par, i, eref): start=par-0.382*rng end=par+0.618*rng c=(end-(end-start)/1.618) d=(start+(end-start)/1.618) best_mi = 0.0", "= mean_squared_error(ref_img, flt_warped[0].ravel(), mse_ref) mse_b = mean_squared_error(ref_img, flt_warped[1].ravel(), mse_ref) return mse_a.cpu(), mse_b.cpu() def", "to analyze', default=1, type=int) parser.add_argument(\"-o\", \"--offset\", nargs='?', help='Starting patient to analyze', default=0, type=int)", "= register_images(Ref_uint8, Flt_uint8) end_time= time.time() final_img.append(f_img.cpu()) it_time = (end_time - start_time) times.append(it_time) t=t+it_time", "= j_h_init[j_h_init>0.000000000000001] entropy=(torch.sum(j_h*(torch.log2(j_h))))*-1 hflt=torch.sum(j_h_init,axis=0) hflt=hflt[hflt>0.000000000000001] eflt=(torch.sum(hflt*(torch.log2(hflt))))*-1 mutualinfo=eref+eflt-entropy return(mutualinfo) def precompute_cross_correlation(Ref_uint8_ravel): return torch.sum(Ref_uint8_ravel *", "# *Permission is hereby granted, free of charge, to any person obtaining a", "indices in the flattened histogram matrix. xy = Ncount[0]*nbin+Ncount[1] # Compute the number", "cur_rng, ref_sup_ravel, flt_stack, par_lin, i, eref) par_lin[i]=cur_par if last_mut-cur_mi>eps: par_lin[i]=param_opt last_mut=cur_mi converged=False else:", "ref_mom = compute_moments(Ref_uint8) flt_mom = compute_moments(Flt_uint8) flt_avg_10 = flt_mom[1]/flt_mom[0] flt_avg_01 = flt_mom[3]/flt_mom[0] flt_mu_20", "params): ref_mom = compute_moments(Ref_uint8) flt_mom = compute_moments(Flt_uint8) flt_avg_10 = flt_mom[1]/flt_mom[0] flt_avg_01 = flt_mom[3]/flt_mom[0]", "# Shape into a proper matrix hist = hist.reshape((nbin, nbin)) hist = hist.float()", "= None device = \"cpu\" ref_vals = None move_data = None def no_transfer(input_data):", "use, copy, modify, merge, publish, distribute, sublicense, and/or sell # *copies of the", "range(args.offset, args.patient): pool = [] curr_prefix = args.prefix+str(k) curr_ct = os.path.join(curr_prefix,args.ct_path) curr_pet =", "COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # *LIABILITY, WHETHER", "deal # *in the Software without restriction, including without limitation the rights #", "the sample indices in the flattened histogram matrix. xy = Ncount[0]*nbin+Ncount[1] # Compute", "A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # *AUTHORS OR", "(Ref_img - Ref_img.min())/(Ref_img.max() - Ref_img.min())*255 Ref_uint8 = Ref_img.round().type(torch.uint8) Flt_img = (Flt_img - Flt_img.min())/(Flt_img.max()", "want values equal to the right edge to be # counted in the", "do so, subject to the following conditions: # * # *The above copyright", "0.0 hist_dim = 256 dim = 512 global ref_vals ref_vals = torch.ones(dim*dim, dtype=torch.int,", "default='./') parser.add_argument(\"-t\", \"--thread_number\", nargs='?', help='Number of // threads', default=1, type=int) parser.add_argument(\"-px\", \"--prefix\", nargs='?',", "\"--config\", nargs='?', help='prefix Path of patients folder', default='./') parser.add_argument(\"-mtr\", \"--metric\", nargs='?', help='Metric accelerator", "cc_ref): flt_warped = batch_transform(flt_imgs, t_mats) cc_a = cross_correlation(ref_img, flt_warped[0].ravel(), cc_ref) cc_b = cross_correlation(ref_img,", "outlier. for i in range(D): # Find which points are on the rightmost", "= torch.sparse.IntTensor(idx_joint, ref_vals, torch.Size([hist_dim, hist_dim])).to_dense()/Ref_uint8_ravel.numel() else: idx_joint = torch.stack((Ref_uint8_ravel, Flt_uint8_ravel)) j_h_init = my_squared_hist2d_t(idx_joint,", "that fall on an edge are put in the right bin. # For", "config=args.config for k in range(args.offset, args.patient): pool = [] curr_prefix = args.prefix+str(k) curr_ct", "== \"MI\": compute_metric = compute_mi precompute_metric = precompute_mutual_information elif args.metric == \"CC\": compute_metric", "# *LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,", "DAMAGES OR OTHER # *LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR", "nbin = edges.shape[0] + 1 # Compute the bin number each sample falls", "flt_stack, eref) params_trans=to_matrix_blocked(optimal_params) flt_transform = transform(Flt_uint8, move_data(params_trans)) return (flt_transform) def save_data(OUT_STAK, name, res_path):", "as an outlier. 
for i in range(D): # Find which points are on", "d=c.split('.') cv2.imwrite(os.path.join(res_path, d[0][0:2]+str(int(d[0][2:5]))+'.png'), kornia.tensor_to_image(OUT_STAK[i].cpu().byte())) #Creare cartelle def compute(CT, PET, name, curr_res, t_id, patient_id):", "1, *image.shape)).float() t_par = torch.unsqueeze(par, dim=0) img_warped = kornia.geometry.warp_affine(tmp_img, t_par, mode=\"nearest\", dsize=(tmp_img.shape[2], tmp_img.shape[3]))", "= transform(Flt_uint8, move_data(params_trans)) return (flt_transform) def save_data(OUT_STAK, name, res_path): for i in range(len(OUT_STAK)):", "help='Starting patient to analyze', default=0, type=int) parser.add_argument(\"-cp\", \"--ct_path\", nargs='?', help='Path of the CT", "args.metric == \"MSE\": compute_metric = compute_mse precompute_metric = precompute_mean_squared_error else: print(\"Unsupported metric!\") exit()", "= time.time() f_img = register_images(Ref_uint8, Flt_uint8) end_time= time.time() final_img.append(f_img.cpu()) it_time = (end_time -", "images_per_thread * (i + 1) if i < num_threads - 1 else len(CT)", "j_h = j_h_init[j_h_init>0.000000000000001] entropy=(torch.sum(j_h*(torch.log2(j_h))))*-1 hflt=torch.sum(j_h_init,axis=0) hflt=hflt[hflt>0.000000000000001] eflt=(torch.sum(hflt*(torch.log2(hflt))))*-1 mutualinfo=eref+eflt-entropy return(mutualinfo) def precompute_cross_correlation(Ref_uint8_ravel): return torch.sum(Ref_uint8_ravel", "cross_correlation(Ref_uint8_ravel, Flt_uint8_ravel, cc_ref): cc_ref_flt = torch.sum(Ref_uint8_ravel * Flt_uint8_ravel) cc_flt = torch.sum(Flt_uint8_ravel * Flt_uint8_ravel)", "#cos_teta mat_params[1][1]=vector_params[2] #cos_teta mat_params[0][1]=torch.sqrt(1-(vector_params[2]**2)) mat_params[1][0]=-mat_params[0][1] return (mat_params) def estimate_initial(Ref_uint8, Flt_uint8, params): ref_mom =", "# *furnished to do so, subject to the following conditions: # * #", "no_transfer if device==\"cpu\" else to_cuda for c,ij in enumerate(zip(CT, PET)): i = ij[0]", "== len(PET) images_per_thread = len(CT) // num_threads print(images_per_thread) for i in range(num_threads): start", "# * # *Permission is hereby granted, free of charge, to any person", "CT=glob.glob(curr_ct+'/*dcm') PET=glob.glob(curr_pet+'/*dcm') PET.sort() CT.sort() assert len(CT) == len(PET) images_per_thread = len(CT) // num_threads", "matrix. xy = Ncount[0]*nbin+Ncount[1] # Compute the number of repetitions in xy and", "flt_mom = compute_moments(Flt_uint8) flt_avg_10 = flt_mom[1]/flt_mom[0] flt_avg_01 = flt_mom[3]/flt_mom[0] flt_mu_20 = (flt_mom[2]/flt_mom[0]*1.0)-(flt_avg_10*flt_avg_10) flt_mu_02", "eref): flt_warped = batch_transform(flt_imgs, t_mats) #flt_img = transform(flt_img, t_mat) mi_a = mutual_information(ref_img, flt_warped[0].ravel(),", "number each sample falls into. Ncount = D*[None] for i in range(D): Ncount[i]", "= torch.arange(img.shape[0], device=device) moments[0] = torch.sum(img) # m00 moments[1] = torch.sum(img * l)", "(indices 0 and -1 for each dimension). 
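    # The trimmed (bins, bins) table is what the CPU branch of mutual_information()
    # divides by Ref_uint8_ravel.numel() to turn raw counts into joint probabilities
    # before computing the entropy terms.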
hist = hist[1:-1,1:-1] return hist def", "last_mut-cur_mi>eps: par_lin[i]=param_opt last_mut=cur_mi converged=False else: par_lin[i]=cur_par #print(\"Iterations \"+str(it)) return (par_lin) def register_images(Ref_uint8, Flt_uint8):", "dim=0).float() flt_stack = torch.stack((flt_u, flt_u)) optimal_params = optimize_powell(rng, pa, Ref_uint8_ravel, flt_stack, eref) params_trans=to_matrix_blocked(optimal_params)", "compute_wrapper(args, num_threads) print(\"Faber Powell python is at the end :)\") if __name__== \"__main__\":", "// threads', default=1, type=int) parser.add_argument(\"-px\", \"--prefix\", nargs='?', help='prefix Path of patients folder', default='./')", "= optimize_goldsearch(cur_par, cur_rng, ref_sup_ravel, flt_stack, par_lin, i, eref) par_lin[i]=cur_par if last_mut-cur_mi>eps: par_lin[i]=param_opt last_mut=cur_mi", "device==\"cpu\" else to_cuda for c,ij in enumerate(zip(CT, PET)): i = ij[0] j =", "edge are put in the right bin. # For the rightmost bin, we", "1.0 return (params) def my_squared_hist2d_t(sample, bins, smin, smax): D, N = sample.shape edges", "= [] curr_prefix = args.prefix+str(k) curr_ct = os.path.join(curr_prefix,args.ct_path) curr_pet = os.path.join(curr_prefix,args.pet_path) curr_res =", "def compute_mse(ref_img, flt_imgs, t_mats, mse_ref): flt_warped = batch_transform(flt_imgs, t_mats) mse_a = mean_squared_error(ref_img, flt_warped[0].ravel(),", "hflt=torch.sum(j_h_init,axis=0) hflt=hflt[hflt>0.000000000000001] eflt=(torch.sum(hflt*(torch.log2(hflt))))*-1 mutualinfo=eref+eflt-entropy return(mutualinfo) def precompute_cross_correlation(Ref_uint8_ravel): return torch.sum(Ref_uint8_ravel * Ref_uint8_ravel) def cross_correlation(Ref_uint8_ravel,", "moments[3] = torch.sum(img * l.reshape((img.shape[0], 1)) ) # m01 moments[4] = torch.sum(img *", "mse_b.cpu() def optimize_goldsearch(par, rng, ref_sup_ravel, flt_stack, linear_par, i, eref): start=par-0.382*rng end=par+0.618*rng c=(end-(end-start)/1.618) d=(start+(end-start)/1.618)", "== edges[-1]) # Shift these points one bin to the left. Ncount[i][on_edge] -=", "these points one bin to the left. 
Ncount[i][on_edge] -= 1 # Compute the", "= ij[0] j = ij[1] ref = pydicom.dcmread(i) Ref_img = torch.tensor(ref.pixel_array.astype(np.int16), dtype=torch.int16, device=device)", "cc_flt = torch.sum(Flt_uint8_ravel * Flt_uint8_ravel) return - cc_ref_flt/torch.sqrt(cc_ref*cc_flt) def precompute_mean_squared_error(Ref_uint8_ravel): pass def mean_squared_error(Ref_uint8_ravel,", "estimate_initial(Ref_uint8, Flt_uint8, params): ref_mom = compute_moments(Ref_uint8) flt_mom = compute_moments(Flt_uint8) flt_avg_10 = flt_mom[1]/flt_mom[0] flt_avg_01", "accelerator to be tested', choices=['MI', 'CC', 'MSE'], default='MI') parser.add_argument(\"-dvc\", \"--device\", nargs='?', help='Target device',", "OF ANY KIND, EXPRESS OR # *IMPLIED, INCLUDING BUT NOT LIMITED TO THE", "# m11 return moments def to_matrix_blocked(vector_params): mat_params=torch.empty((2,3)) mat_params[0][2]=vector_params[0] mat_params[1][2]=vector_params[1] if vector_params[2] > 1", "= 0.0 while(math.fabs(c-d)>0.005): linear_par[i]=c a_mat=to_matrix_blocked(linear_par) linear_par[i]=d b_mat=to_matrix_blocked(linear_par) mats = move_data(torch.stack((a_mat, b_mat))) mi_a, mi_b", "converged=False else: par_lin[i]=cur_par #print(\"Iterations \"+str(it)) return (par_lin) def register_images(Ref_uint8, Flt_uint8): params = torch.empty((2,3),", "len(CT) // num_threads print(images_per_thread) for i in range(num_threads): start = images_per_thread * i", "return eref def mutual_information(Ref_uint8_ravel, Flt_uint8_ravel, eref): if(device == \"cuda\"): idx_joint = torch.stack((Ref_uint8_ravel, Flt_uint8_ravel)).long()", "Flt_uint8, params): ref_mom = compute_moments(Ref_uint8) flt_mom = compute_moments(Flt_uint8) flt_avg_10 = flt_mom[1]/flt_mom[0] flt_avg_01 =", "device=device) Ref_img = (Ref_img - Ref_img.min())/(Ref_img.max() - Ref_img.min())*255 Ref_uint8 = Ref_img.round().type(torch.uint8) Flt_img =", "l.reshape((img.shape[0], 1)) ) # m01 moments[4] = torch.sum(img * (l.reshape((img.shape[0], 1)))**2 ) #", "[<NAME>, <NAME>, <NAME>, <NAME>] # * # *Permission is hereby granted, free of", "License # * # *Copyright (c) [2021] [<NAME>, <NAME>, <NAME>, <NAME>] # *", "t_mats, eref): flt_warped = batch_transform(flt_imgs, t_mats) #flt_img = transform(flt_img, t_mat) mi_a = mutual_information(ref_img,", "= 512 global ref_vals ref_vals = torch.ones(dim*dim, dtype=torch.int, device=device) global move_data move_data =", "to deal # *in the Software without restriction, including without limitation the rights", "not as an outlier. for i in range(D): # Find which points are", "mse_ref): return torch.sum((Ref_uint8_ravel - Flt_uint8_ravel)**2) def compute_mi(ref_img, flt_imgs, t_mats, eref): flt_warped = batch_transform(flt_imgs,", "params_cpu = params.cpu() rng = torch.tensor([80.0, 80.0, 1.0]) pa = torch.tensor([params_cpu[0][2],params_cpu[1][2],params_cpu[0][0]]) Ref_uint8_ravel =", "image.reshape((1, 1, *image.shape)).float() t_par = torch.unsqueeze(par, dim=0) img_warped = kornia.geometry.warp_affine(tmp_img, t_par, mode=\"nearest\", dsize=(tmp_img.shape[2],", "ref_mom[3]/ref_mom[0] - flt_mom[3]/flt_mom[0] rho_flt=0.5*torch.atan((2.0*flt_mu_11)/(flt_mu_20-flt_mu_02)) rho_ref=0.5*torch.atan((2.0*ref_mu_11)/(ref_mu_20-ref_mu_02)) delta_rho=rho_ref-rho_flt roundness=(flt_mom[2]/flt_mom[0]) / (flt_mom[4]/flt_mom[0]) if torch.abs(roundness-1.0)>=0.3: params[0][0]= torch.cos(delta_rho)", "# *SOFTWARE. 
# ******************************************/ import os import pydicom import cv2 import numpy as", "def mean_squared_error(Ref_uint8_ravel, Flt_uint8_ravel, mse_ref): return torch.sum((Ref_uint8_ravel - Flt_uint8_ravel)**2) def compute_mi(ref_img, flt_imgs, t_mats, eref):", "save_data(OUT_STAK, name, res_path): for i in range(len(OUT_STAK)): b=name[i].split('/') c=b.pop() d=c.split('.') cv2.imwrite(os.path.join(res_path, d[0][0:2]+str(int(d[0][2:5]))+'.png'), kornia.tensor_to_image(OUT_STAK[i].cpu().byte()))", "start = images_per_thread * i end = images_per_thread * (i + 1) if", "def save_data(OUT_STAK, name, res_path): for i in range(len(OUT_STAK)): b=name[i].split('/') c=b.pop() d=c.split('.') cv2.imwrite(os.path.join(res_path, d[0][0:2]+str(int(d[0][2:5]))+'.png'),", "flt_imgs, t_mats, mse_ref): flt_warped = batch_transform(flt_imgs, t_mats) mse_a = mean_squared_error(ref_img, flt_warped[0].ravel(), mse_ref) mse_b", "# * # *The above copyright notice and this permission notice shall be", "= cross_correlation(ref_img, flt_warped[1].ravel(), cc_ref) return cc_a.cpu(), cc_b.cpu() def compute_mse(ref_img, flt_imgs, t_mats, mse_ref): flt_warped", "times.append(it_time) t=t+it_time df = pd.DataFrame([t, np.mean(times), np.std(times)],columns=['Test'+str(patient_id)])#+str(config)accel_id.get_config())]) times_df = pd.DataFrame(times,columns=['Test'+str(patient_id)])#+str(config)accel_id.get_config())]) df_path = os.path.join(curr_res,'Time_powll_%02d.csv'", "linear_par[i]=d c=(end-(end-start)/1.618) d=(start+(end-start)/1.618) return (end+start)/2, best_mi def optimize_powell(rng, par_lin, ref_sup_ravel, flt_stack, eref): converged", "be tested', choices=['MI', 'CC', 'MSE'], default='MI') parser.add_argument(\"-dvc\", \"--device\", nargs='?', help='Target device', choices=['cpu', 'cuda'],", "default='./') parser.add_argument(\"-rp\", \"--res_path\", nargs='?', help='Path of the Results', default='./') parser.add_argument(\"-t\", \"--thread_number\", nargs='?', help='Number", "-torch.sin(delta_rho) params[1][0] = torch.sin(delta_rho) params[1][1] = torch.cos(delta_rho) else: params[0][0]= 1.0 params[0][1] = 0.0", "flt_warped[0].ravel(), eref) mi_b = mutual_information(ref_img, flt_warped[1].ravel(), eref) return torch.exp(-mi_a).cpu(), torch.exp(-mi_b).cpu() def compute_cc(ref_img, flt_imgs,", "best_mi = mi_b linear_par[i]=d c=(end-(end-start)/1.618) d=(start+(end-start)/1.618) return (end+start)/2, best_mi def optimize_powell(rng, par_lin, ref_sup_ravel,", "PET[start:end], name, curr_res, i, k))) for t in pool: t.start() for t in", "0, 255)/Ref_uint8_ravel.numel() j_h = j_h_init[j_h_init>0.000000000000001] entropy=(torch.sum(j_h*(torch.log2(j_h))))*-1 hflt=torch.sum(j_h_init,axis=0) hflt=hflt[hflt>0.000000000000001] eflt=(torch.sum(hflt*(torch.log2(hflt))))*-1 mutualinfo=eref+eflt-entropy return(mutualinfo) def precompute_cross_correlation(Ref_uint8_ravel):", "torch.histc(Ref_uint8_ravel, bins=256) href /= Ref_uint8_ravel.numel() href=href[href>0.000000000000001] eref=(torch.sum(href*(torch.log2(href))))*-1 return eref def mutual_information(Ref_uint8_ravel, Flt_uint8_ravel, eref):", "ref = pydicom.dcmread(i) Ref_img = torch.tensor(ref.pixel_array.astype(np.int16), dtype=torch.int16, device=device) Ref_img[Ref_img==-2000]=1 flt = pydicom.dcmread(j) Flt_img", "Flt_uint8_ravel) cc_flt = torch.sum(Flt_uint8_ravel * Flt_uint8_ravel) return - cc_ref_flt/torch.sqrt(cc_ref*cc_flt) def precompute_mean_squared_error(Ref_uint8_ravel): pass def", "cc_b = cross_correlation(ref_img, 
flt_warped[1].ravel(), cc_ref) return cc_a.cpu(), cc_b.cpu() def compute_mse(ref_img, flt_imgs, t_mats, mse_ref):", "LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # *FITNESS FOR A PARTICULAR PURPOSE AND", "notice and this permission notice shall be included in all # *copies or", "torch.sum(img * l * l.reshape((img.shape[0], 1))) # m11 return moments def to_matrix_blocked(vector_params): mat_params=torch.empty((2,3))", "torch.sum((Ref_uint8_ravel - Flt_uint8_ravel)**2) def compute_mi(ref_img, flt_imgs, t_mats, eref): flt_warped = batch_transform(flt_imgs, t_mats) #flt_img", "= hist.float() # Remove outliers (indices 0 and -1 for each dimension). hist", "- Ref_img.min())/(Ref_img.max() - Ref_img.min())*255 Ref_uint8 = Ref_img.round().type(torch.uint8) Flt_img = (Flt_img - Flt_img.min())/(Flt_img.max() -", "bins=256) href /= Ref_uint8_ravel.numel() href=href[href>0.000000000000001] eref=(torch.sum(href*(torch.log2(href))))*-1 return eref def mutual_information(Ref_uint8_ravel, Flt_uint8_ravel, eref): if(device", "(ref_mom[5]/ref_mom[0]*1.0)-(ref_avg_01*ref_avg_10) params[0][2] = ref_mom[1]/ref_mom[0]-flt_mom[1]/flt_mom[0] params[1][2] = ref_mom[3]/ref_mom[0] - flt_mom[3]/flt_mom[0] rho_flt=0.5*torch.atan((2.0*flt_mu_11)/(flt_mu_20-flt_mu_02)) rho_ref=0.5*torch.atan((2.0*ref_mu_11)/(ref_mu_20-ref_mu_02)) delta_rho=rho_ref-rho_flt roundness=(flt_mom[2]/flt_mom[0])", "of patients folder', default='./') parser.add_argument(\"-im\", \"--image_dimension\", nargs='?', help='Target images dimensions', default=512, type=int) parser.add_argument(\"-c\",", "to the following conditions: # * # *The above copyright notice and this", "mutual_information(ref_img, flt_warped[1].ravel(), eref) return torch.exp(-mi_a).cpu(), torch.exp(-mi_b).cpu() def compute_cc(ref_img, flt_imgs, t_mats, cc_ref): flt_warped =", "including without limitation the rights # *to use, copy, modify, merge, publish, distribute,", "1 # Compute the sample indices in the flattened histogram matrix. xy =", "return (par_lin) def register_images(Ref_uint8, Flt_uint8): params = torch.empty((2,3), device=device) estimate_initial(Ref_uint8, Flt_uint8, params) params_cpu", "precompute_mutual_information(Ref_uint8_ravel): href = torch.histc(Ref_uint8_ravel, bins=256) href /= Ref_uint8_ravel.numel() href=href[href>0.000000000000001] eref=(torch.sum(href*(torch.log2(href))))*-1 return eref def", "(sample[i, :] == edges[-1]) # Shift these points one bin to the left.", "end=d best_mi = mi_a linear_par[i]=c else: start=c best_mi = mi_b linear_par[i]=d c=(end-(end-start)/1.618) d=(start+(end-start)/1.618)", "1) if i < num_threads - 1 else len(CT) name = \"t%02d\" %", "rng = torch.tensor([80.0, 80.0, 1.0]) pa = torch.tensor([params_cpu[0][2],params_cpu[1][2],params_cpu[0][0]]) Ref_uint8_ravel = Ref_uint8.ravel().double() eref =", "flt_warped[1].ravel(), mse_ref) return mse_a.cpu(), mse_b.cpu() def optimize_goldsearch(par, rng, ref_sup_ravel, flt_stack, linear_par, i, eref):", "# *THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS", "equal to the right edge to be # counted in the last bin,", "flt = pydicom.dcmread(j) Flt_img = torch.tensor(flt.pixel_array.astype(np.int16), dtype=torch.int16, device=device) Ref_img = (Ref_img - Ref_img.min())/(Ref_img.max()", "rightmost edge. 
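# The helpers above cover the geometric half of the pipeline: to_matrix_blocked
# packs a 3-element parameter vector [tx, ty, cos(theta)] into the 2x3 affine
# matrix kornia expects, and estimate_initial seeds that vector from raw image
# moments (translation from the centroid offset, rotation from the principal-axis
# angle, applied only when the floating image is elongated enough for the angle
# to be meaningful). A minimal sketch of how they compose on a dummy image; the
# _example_ name and the 8x8 ramp are illustrative assumptions, not part of the
# original pipeline, and nothing below is ever called by it.
def _example_affine_warp():
    dummy = torch.arange(64, dtype=torch.float32).reshape(8, 8)
    par_vec = torch.tensor([2.0, 0.0, 1.0])   # shift by 2 px, no rotation
    par_mat = to_matrix_blocked(par_vec)      # 2x3 affine matrix
    warped = transform(dummy, par_mat)        # (1, 1, 8, 8) warped tensor
    return warped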
def my_squared_hist2d_t(sample, bins, smin, smax):
    D, N = sample.shape
    edges = torch.linspace(smin, smax, bins + 1, device=device)
    nbin = edges.shape[0] + 1
    # Compute the bin number each sample falls into.
    Ncount = D*[None]
    for i in range(D):
        Ncount[i] = torch.searchsorted(edges, sample[i, :], right=True)
    # Using digitize, values that fall on an edge are put in the right bin.
    # For the rightmost bin, we want values equal to the right edge to be
    # counted in the last bin, and not as an outlier.
    for i in range(D):
        # Find which points are on the rightmost edge.
        on_edge = (sample[i, :] == edges[-1])
        # Shift these points one bin to the left.
        Ncount[i][on_edge] -= 1
    # Compute the sample indices in the flattened histogram matrix.
    xy = Ncount[0]*nbin + Ncount[1]
    # Compute the number of repetitions in xy and assign it to the flattened histogram.
    hist = torch.bincount(xy, None, minlength=nbin*nbin)
    # Shape into a proper matrix
    hist = hist.reshape((nbin, nbin))
    hist = hist.float()
    # Remove outliers (indices 0 and -1 for each dimension).
    hist = hist[1:-1,1:-1]
    return hist

def precompute_mutual_information(Ref_uint8_ravel):
    href = torch.histc(Ref_uint8_ravel, bins=256)
    href /= Ref_uint8_ravel.numel()
    href = href[href>0.000000000000001]
    eref = (torch.sum(href*(torch.log2(href))))*-1
    return eref

def mutual_information(Ref_uint8_ravel, Flt_uint8_ravel, eref):
    if(device == "cuda"):
        idx_joint = torch.stack((Ref_uint8_ravel, Flt_uint8_ravel)).long()
        j_h_init = torch.sparse.IntTensor(idx_joint, ref_vals, torch.Size([hist_dim, hist_dim])).to_dense()/Ref_uint8_ravel.numel()
    else:
        idx_joint = torch.stack((Ref_uint8_ravel, Flt_uint8_ravel))
        j_h_init = my_squared_hist2d_t(idx_joint, hist_dim, 0, 255)/Ref_uint8_ravel.numel()
    j_h = j_h_init[j_h_init>0.000000000000001]
    entropy = (torch.sum(j_h*(torch.log2(j_h))))*-1
    hflt = torch.sum(j_h_init, axis=0)
    hflt = hflt[hflt>0.000000000000001]
    eflt = (torch.sum(hflt*(torch.log2(hflt))))*-1
    mutualinfo = eref+eflt-entropy
    return(mutualinfo)

def precompute_cross_correlation(Ref_uint8_ravel):
    return torch.sum(Ref_uint8_ravel * Ref_uint8_ravel)

def cross_correlation(Ref_uint8_ravel, Flt_uint8_ravel, cc_ref):
    cc_ref_flt = torch.sum(Ref_uint8_ravel * Flt_uint8_ravel)
    cc_flt = torch.sum(Flt_uint8_ravel * Flt_uint8_ravel)
    return - cc_ref_flt/torch.sqrt(cc_ref*cc_flt)

def precompute_mean_squared_error(Ref_uint8_ravel):
    pass

def mean_squared_error(Ref_uint8_ravel, Flt_uint8_ravel, mse_ref):
    return torch.sum((Ref_uint8_ravel - Flt_uint8_ravel)**2)

def compute_mi(ref_img, flt_imgs, t_mats, eref):
    flt_warped = batch_transform(flt_imgs, t_mats)
    #flt_img = transform(flt_img, t_mat)
    mi_a = mutual_information(ref_img, flt_warped[0].ravel(), eref)
    mi_b = mutual_information(ref_img, flt_warped[1].ravel(), eref)
    return torch.exp(-mi_a).cpu(), torch.exp(-mi_b).cpu()

def compute_cc(ref_img, flt_imgs, t_mats, cc_ref):
    flt_warped = batch_transform(flt_imgs, t_mats)
    cc_a = cross_correlation(ref_img, flt_warped[0].ravel(), cc_ref)
    cc_b = cross_correlation(ref_img, flt_warped[1].ravel(), cc_ref)
    return cc_a.cpu(), cc_b.cpu()

def compute_mse(ref_img, flt_imgs, t_mats, mse_ref):
    flt_warped = batch_transform(flt_imgs, t_mats)
    mse_a = mean_squared_error(ref_img, flt_warped[0].ravel(), mse_ref)
    mse_b = mean_squared_error(ref_img, flt_warped[1].ravel(), mse_ref)
    return mse_a.cpu(), mse_b.cpu()
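# All three metric pairs above follow one pattern: the precompute_* function
# caches whatever depends only on the reference slice (its Shannon entropy for
# MI, its self-correlation for CC, nothing for MSE), and the compute_* wrapper
# warps two candidate transforms in a single batch and returns two costs where
# lower is better (exp(-MI), the negated normalised cross-correlation, or the
# plain sum of squared differences). For MI the code evaluates
# H(ref) + H(flt) - H(ref, flt) from a 256-bin joint histogram. A rough
# self-check sketch (illustrative only, never called by the pipeline): the MI of
# an image with itself should land close to its own entropy, up to the slightly
# different binning used by torch.histc and the fixed [0, 255] joint histogram.
def _example_mi_self_check():
    img = torch.randint(0, 256, (64, 64)).double().ravel()
    eref = precompute_mutual_information(img)
    mi = mutual_information(img, img, eref)
    return eref, mi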
def optimize_goldsearch(par, rng, ref_sup_ravel, flt_stack, linear_par, i, eref):
    start = par-0.382*rng
    end = par+0.618*rng
    c = (end-(end-start)/1.618)
    d = (start+(end-start)/1.618)
    best_mi = 0.0
    while(math.fabs(c-d) > 0.005):
        linear_par[i] = c
        a_mat = to_matrix_blocked(linear_par)
        linear_par[i] = d
        b_mat = to_matrix_blocked(linear_par)
        mats = move_data(torch.stack((a_mat, b_mat)))
        mi_a, mi_b = compute_metric(ref_sup_ravel, flt_stack, mats, eref)
        if(mi_a < mi_b):
            end = d
            best_mi = mi_a
            linear_par[i] = c
        else:
            start = c
            best_mi = mi_b
            linear_par[i] = d
        c = (end-(end-start)/1.618)
        d = (start+(end-start)/1.618)
    return (end+start)/2, best_mi

def optimize_powell(rng, par_lin, ref_sup_ravel, flt_stack, eref):
    converged = False
    eps = 0.000005
    last_mut = 100000.0
    it = 0
    while(not converged):
        converged = True
        it = it+1
        for i in range(par_lin.numel()):
            cur_par = par_lin[i]
            cur_rng = rng[i]
            param_opt, cur_mi = optimize_goldsearch(cur_par, cur_rng, ref_sup_ravel, flt_stack, par_lin, i, eref)
            par_lin[i] = cur_par
            if last_mut-cur_mi > eps:
                par_lin[i] = param_opt
                last_mut = cur_mi
                converged = False
            else:
                par_lin[i] = cur_par
    #print("Iterations "+str(it))
    return (par_lin)

def register_images(Ref_uint8, Flt_uint8):
    params = torch.empty((2,3), device=device)
    estimate_initial(Ref_uint8, Flt_uint8, params)
    params_cpu = params.cpu()
    rng = torch.tensor([80.0, 80.0, 1.0])
    pa = torch.tensor([params_cpu[0][2], params_cpu[1][2], params_cpu[0][0]])
    Ref_uint8_ravel = Ref_uint8.ravel().double()
    eref = precompute_metric(Ref_uint8_ravel)
    flt_u = torch.unsqueeze(Flt_uint8, dim=0).float()
    flt_stack = torch.stack((flt_u, flt_u))
    optimal_params = optimize_powell(rng, pa, Ref_uint8_ravel, flt_stack, eref)
    params_trans = to_matrix_blocked(optimal_params)
    flt_transform = transform(Flt_uint8, move_data(params_trans))
    return (flt_transform)

def save_data(OUT_STAK, name, res_path):
    for i in range(len(OUT_STAK)):
        b = name[i].split('/')
        c = b.pop()
        d = c.split('.')
        cv2.imwrite(os.path.join(res_path, d[0][0:2]+str(int(d[0][2:5]))+'.png'), kornia.tensor_to_image(OUT_STAK[i].cpu().byte()))

# Create folders
def compute(CT, PET, name, curr_res, t_id, patient_id):
    final_img = []
    times = []
    t = 0.0
    it_time = 0.0
    hist_dim = 256
    dim = 512
    global ref_vals
    ref_vals = torch.ones(dim*dim, dtype=torch.int, device=device)
    global move_data
    move_data = no_transfer if device=="cpu" else to_cuda
    for c,ij in enumerate(zip(CT, PET)):
        i = ij[0]
        j = ij[1]
        ref = pydicom.dcmread(i)
        Ref_img = torch.tensor(ref.pixel_array.astype(np.int16), dtype=torch.int16, device=device)
        Ref_img[Ref_img==-2000] = 1
        flt = pydicom.dcmread(j)
        Flt_img = torch.tensor(flt.pixel_array.astype(np.int16), dtype=torch.int16, device=device)
        Ref_img = (Ref_img - Ref_img.min())/(Ref_img.max() - Ref_img.min())*255
        Ref_uint8 = Ref_img.round().type(torch.uint8)
        Flt_img = (Flt_img - Flt_img.min())/(Flt_img.max() - Flt_img.min())*255
        Flt_uint8 = Flt_img.round().type(torch.uint8)
        start_time = time.time()
        f_img = register_images(Ref_uint8, Flt_uint8)
        end_time = time.time()
        final_img.append(f_img.cpu())
        it_time = (end_time - start_time)
        times.append(it_time)
        t = t+it_time
    df = pd.DataFrame([t, np.mean(times), np.std(times)], columns=['Test'+str(patient_id)])#+str(config)accel_id.get_config())])
    times_df = pd.DataFrame(times, columns=['Test'+str(patient_id)])#+str(config)accel_id.get_config())])
    df_path = os.path.join(curr_res, 'Time_powll_%02d.csv' % (t_id))
    times_df_path = os.path.join(curr_res, 'Img_powll_%02d.csv' % (t_id))
    df.to_csv(df_path, index=False)
    times_df.to_csv(times_df_path, index=False)
    save_data(final_img, PET, curr_res)
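# optimize_powell above is a coordinate-descent loop: it sweeps the three
# parameters (tx, ty, cos(theta)) one at a time and line-searches each with a
# golden-section search, keeping a new value only if it improves the metric by
# more than eps. The 0.382, 0.618 and 1.618 constants in optimize_goldsearch are
# 1 - 1/phi, 1/phi and phi (the golden ratio). A self-contained sketch of the
# same bracketing scheme on a 1-D quadratic (illustrative only, never called by
# the pipeline):
def _example_golden_section_1d():
    f = lambda x: (x - 3.0) ** 2           # minimum at x = 3
    start, end = 0.0, 10.0
    c = end - (end - start) / 1.618
    d = start + (end - start) / 1.618
    while math.fabs(c - d) > 0.005:
        if f(c) < f(d):
            end = d                        # keep the left bracket
        else:
            start = c                      # keep the right bracket
        c = end - (end - start) / 1.618
        d = start + (end - start) / 1.618
    return (end + start) / 2               # ~3.0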
def compute_wrapper(args, num_threads=1):
    config = args.config
    for k in range(args.offset, args.patient):
        pool = []
        curr_prefix = args.prefix+str(k)
        curr_ct = os.path.join(curr_prefix, args.ct_path)
        curr_pet = os.path.join(curr_prefix, args.pet_path)
        curr_res = os.path.join("", args.res_path)
        os.makedirs(curr_res, exist_ok=True)
        CT = glob.glob(curr_ct+'/*dcm')
        PET = glob.glob(curr_pet+'/*dcm')
        PET.sort()
        CT.sort()
        assert len(CT) == len(PET)
        images_per_thread = len(CT) // num_threads
        print(images_per_thread)
        for i in range(num_threads):
            start = images_per_thread * i
            end = images_per_thread * (i + 1) if i < num_threads - 1 else len(CT)
            name = "t%02d" % (i)
            pool.append(Process(target=compute, args=(CT[start:end], PET[start:end], name, curr_res, i, k)))
        for t in pool:
            t.start()
        for t in pool:
            t.join()

hist_dim = 256
dim = 512

def main():
    parser = argparse.ArgumentParser(description='Iron software for IR onto a python env')
    parser.add_argument("-pt", "--patient", nargs='?', help='Number of the patient to analyze', default=1, type=int)
    parser.add_argument("-o", "--offset", nargs='?', help='Starting patient to analyze', default=0, type=int)
    parser.add_argument("-cp", "--ct_path", nargs='?', help='Path of the CT Images', default='./')
    parser.add_argument("-pp", "--pet_path", nargs='?', help='Path of the PET Images', default='./')
    parser.add_argument("-rp", "--res_path", nargs='?', help='Path of the Results', default='./')
    parser.add_argument("-t", "--thread_number", nargs='?', help='Number of // threads', default=1, type=int)
    parser.add_argument("-px", "--prefix", nargs='?', help='prefix Path of patients folder', default='./')
    parser.add_argument("-im", "--image_dimension", nargs='?', help='Target images dimensions', default=512, type=int)
    parser.add_argument("-c", "--config", nargs='?', help='prefix Path of patients folder', default='./')
    parser.add_argument("-mtr", "--metric", nargs='?', help='Metric accelerator to be tested', choices=['MI', 'CC', 'MSE'], default='MI')
    parser.add_argument("-dvc", "--device", nargs='?', help='Target device', choices=['cpu', 'cuda'], default='cpu')
    args = parser.parse_args()
    num_threads = args.thread_number
    patient_number = args.patient
    print(args.config)
    print(args)
    global compute_metric, precompute_metric
    if args.metric == "MI":
        compute_metric = compute_mi
        precompute_metric = precompute_mutual_information
    elif args.metric == "CC":
        compute_metric = compute_cc
        precompute_metric = precompute_cross_correlation
    elif args.metric == "MSE":
        compute_metric = compute_mse
        precompute_metric = precompute_mean_squared_error
    else:
        print("Unsupported metric!")
        exit()
    global device
    device = args.device
    compute_wrapper(args, num_threads)
    print("Faber Powell python is at the end :)")

if __name__== "__main__":
    main()
# For the rightmost bin, we want values equal to the right", "0.0 params[1][1] = 1.0 return (params) def my_squared_hist2d_t(sample, bins, smin, smax): D, N", "args.patient): pool = [] curr_prefix = args.prefix+str(k) curr_ct = os.path.join(curr_prefix,args.ct_path) curr_pet = os.path.join(curr_prefix,args.pet_path)", "Ref_uint8_ravel = Ref_uint8.ravel().double() eref = precompute_metric(Ref_uint8_ravel) flt_u = torch.unsqueeze(Flt_uint8, dim=0).float() flt_stack = torch.stack((flt_u,", "ref_vals = torch.ones(dim*dim, dtype=torch.int, device=device) global move_data move_data = no_transfer if device==\"cpu\" else", "mi_a = mutual_information(ref_img, flt_warped[0].ravel(), eref) mi_b = mutual_information(ref_img, flt_warped[1].ravel(), eref) return torch.exp(-mi_a).cpu(), torch.exp(-mi_b).cpu()", "t_mats) mse_a = mean_squared_error(ref_img, flt_warped[0].ravel(), mse_ref) mse_b = mean_squared_error(ref_img, flt_warped[1].ravel(), mse_ref) return mse_a.cpu(),", "granted, free of charge, to any person obtaining a copy # *of this", "ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # *OUT OF OR IN", "sublicense, and/or sell # *copies of the Software, and to permit persons to", "= batch_transform(flt_imgs, t_mats) #flt_img = transform(flt_img, t_mat) mi_a = mutual_information(ref_img, flt_warped[0].ravel(), eref) mi_b", "dim = 512 global ref_vals ref_vals = torch.ones(dim*dim, dtype=torch.int, device=device) global move_data move_data", "cv2.imwrite(os.path.join(res_path, d[0][0:2]+str(int(d[0][2:5]))+'.png'), kornia.tensor_to_image(OUT_STAK[i].cpu().byte())) #Creare cartelle def compute(CT, PET, name, curr_res, t_id, patient_id): final_img=[]", "params[0][0]= torch.cos(delta_rho) params[0][1] = -torch.sin(delta_rho) params[1][0] = torch.sin(delta_rho) params[1][1] = torch.cos(delta_rho) else: params[0][0]=", "cc_ref_flt/torch.sqrt(cc_ref*cc_flt) def precompute_mean_squared_error(Ref_uint8_ravel): pass def mean_squared_error(Ref_uint8_ravel, Flt_uint8_ravel, mse_ref): return torch.sum((Ref_uint8_ravel - Flt_uint8_ravel)**2) def", "i in range(num_threads): start = images_per_thread * i end = images_per_thread * (i", "final_img.append(f_img.cpu()) it_time = (end_time - start_time) times.append(it_time) t=t+it_time df = pd.DataFrame([t, np.mean(times), np.std(times)],columns=['Test'+str(patient_id)])#+str(config)accel_id.get_config())])", "HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # *LIABILITY, WHETHER IN", "* # *The above copyright notice and this permission notice shall be included", "linear_par, i, eref): start=par-0.382*rng end=par+0.618*rng c=(end-(end-start)/1.618) d=(start+(end-start)/1.618) best_mi = 0.0 while(math.fabs(c-d)>0.005): linear_par[i]=c a_mat=to_matrix_blocked(linear_par)", "device=device) nbin = edges.shape[0] + 1 # Compute the bin number each sample", "an outlier. for i in range(D): # Find which points are on the", "PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # *AUTHORS OR COPYRIGHT", "= torch.unsqueeze(par, dim=0) img_warped = kornia.geometry.warp_affine(tmp_img, t_par, mode=\"nearest\", dsize=(tmp_img.shape[2], tmp_img.shape[3])) return img_warped def", "compute_metric, precompute_metric if args.metric == \"MI\": compute_metric = compute_mi precompute_metric = precompute_mutual_information elif", "os.path.join(curr_prefix,args.pet_path) curr_res = os.path.join(\"\",args.res_path) os.makedirs(curr_res,exist_ok=True) CT=glob.glob(curr_ct+'/*dcm') PET=glob.glob(curr_pet+'/*dcm') PET.sort() CT.sort() assert len(CT) == len(PET)", "mat_params[0][1]=0 mat_params[1][0]=0 else: mat_params[0][0]=vector_params[2] #cos_teta mat_params[1][1]=vector_params[2] #cos_teta mat_params[0][1]=torch.sqrt(1-(vector_params[2]**2)) mat_params[1][0]=-mat_params[0][1] return (mat_params) def estimate_initial(Ref_uint8,", "publish, distribute, sublicense, and/or sell # *copies of the Software, and to permit", "parser.add_argument(\"-pt\", \"--patient\", nargs='?', help='Number of the patient to analyze', default=1, type=int) parser.add_argument(\"-o\", \"--offset\",", "args.metric == \"MI\": compute_metric = compute_mi precompute_metric = precompute_mutual_information elif args.metric == \"CC\":", "mi_b linear_par[i]=d c=(end-(end-start)/1.618) d=(start+(end-start)/1.618) return (end+start)/2, best_mi def optimize_powell(rng, par_lin, ref_sup_ravel, flt_stack, eref):", "the rightmost bin, we want values equal to the right edge to be", "*to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # *copies of", "Compute the bin number each sample falls into. Ncount = D*[None] for i", "pa, Ref_uint8_ravel, flt_stack, eref) params_trans=to_matrix_blocked(optimal_params) flt_transform = transform(Flt_uint8, move_data(params_trans)) return (flt_transform) def save_data(OUT_STAK,", "IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # *IMPLIED, INCLUDING BUT NOT", "default=512, type=int) parser.add_argument(\"-c\", \"--config\", nargs='?', help='prefix Path of patients folder', default='./') parser.add_argument(\"-mtr\", \"--metric\",", "kornia.geometry.warp_affine(images, pars, mode=\"nearest\", dsize=(images.shape[2], images.shape[3])) return img_warped def transform(image, par): tmp_img = image.reshape((1,", "smax): D, N = sample.shape edges = torch.linspace(smin, smax, bins + 1, device=device)", "as np import math import glob import time import pandas as pd from", "flt_avg_01 = flt_mom[3]/flt_mom[0] flt_mu_20 = (flt_mom[2]/flt_mom[0]*1.0)-(flt_avg_10*flt_avg_10) flt_mu_02 = (flt_mom[4]/flt_mom[0]*1.0)-(flt_avg_01*flt_avg_01) flt_mu_11 = (flt_mom[5]/flt_mom[0]*1.0)-(flt_avg_01*flt_avg_10) ref_avg_10", "mse_a.cpu(), mse_b.cpu() def optimize_goldsearch(par, rng, ref_sup_ravel, flt_stack, linear_par, i, eref): start=par-0.382*rng end=par+0.618*rng c=(end-(end-start)/1.618)", "# m02 moments[5] = torch.sum(img * l * l.reshape((img.shape[0], 1))) # m11 return", "TORT OR OTHERWISE, ARISING FROM, # *OUT OF OR IN CONNECTION WITH THE", "# Remove outliers (indices 0 and -1 for each dimension). 
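# --- Illustrative sketch (not part of the original script) -------------------
# Example of the blocked parametrisation used by the optimiser: a vector
# [tx, ty, cos_theta] is expanded by to_matrix_blocked into the 2x3 affine
# matrix consumed by kornia.geometry.warp_affine. The numbers are arbitrary
# example values, not taken from the original code.
def _blocked_parameters_demo():
    vec = torch.tensor([10.0, -5.0, 0.5])  # tx = 10 px, ty = -5 px, cos(theta) = 0.5
    mat = to_matrix_blocked(vec)
    # mat is [[0.5, 0.866, 10.0], [-0.866, 0.5, -5.0]]: sin(theta) is derived from cos(theta)
    return mat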
def my_squared_hist2d_t(sample, bins, smin, smax):
    D, N = sample.shape
    edges = torch.linspace(smin, smax, bins + 1, device=device)
    nbin = edges.shape[0] + 1

    # Compute the bin number each sample falls into.
    Ncount = D * [None]
    for i in range(D):
        Ncount[i] = torch.searchsorted(edges, sample[i, :], right=True)

    # Using digitize, values that fall on an edge are put in the right bin.
    # For the rightmost bin, we want values equal to the right edge to be
    # counted in the last bin, and not as an outlier.
    for i in range(D):
        # Find which points are on the rightmost edge.
        on_edge = (sample[i, :] == edges[-1])
        # Shift these points one bin to the left.
        Ncount[i][on_edge] -= 1

    # Compute the sample indices in the flattened histogram matrix.
    xy = Ncount[0] * nbin + Ncount[1]

    # Compute the number of repetitions in xy and assign it to the flattened histogram.
    hist = torch.bincount(xy, None, minlength=nbin * nbin)

    # Shape into a proper matrix
    hist = hist.reshape((nbin, nbin))
    hist = hist.float()

    # Remove outliers (indices 0 and -1 for each dimension).
    hist = hist[1:-1, 1:-1]
    return hist


def precompute_mutual_information(Ref_uint8_ravel):
    href = torch.histc(Ref_uint8_ravel, bins=256)
    href /= Ref_uint8_ravel.numel()
    href = href[href > 0.000000000000001]
    eref = (torch.sum(href * (torch.log2(href)))) * -1
    return eref


def mutual_information(Ref_uint8_ravel, Flt_uint8_ravel, eref):
    # Joint histogram: a sparse scatter on CUDA, the searchsorted-based helper above on the CPU.
    if device == "cuda":
        idx_joint = torch.stack((Ref_uint8_ravel, Flt_uint8_ravel)).long()
        j_h_init = torch.sparse.IntTensor(idx_joint, ref_vals, torch.Size([hist_dim, hist_dim])).to_dense() / Ref_uint8_ravel.numel()
    else:
        idx_joint = torch.stack((Ref_uint8_ravel, Flt_uint8_ravel))
        j_h_init = my_squared_hist2d_t(idx_joint, hist_dim, 0, 255) / Ref_uint8_ravel.numel()
    j_h = j_h_init[j_h_init > 0.000000000000001]
    entropy = (torch.sum(j_h * (torch.log2(j_h)))) * -1
    hflt = torch.sum(j_h_init, axis=0)
    hflt = hflt[hflt > 0.000000000000001]
    eflt = (torch.sum(hflt * (torch.log2(hflt)))) * -1
    mutualinfo = eref + eflt - entropy
    return (mutualinfo)
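# --- Illustrative sketch (not part of the original script) -------------------
# mutual_information() implements MI = H(ref) + H(flt) - H(ref, flt). H(ref)
# depends only on the fixed image, which is why it is precomputed once (eref)
# and reused for every candidate transform. The helper below spells out the
# marginal-entropy formula on an arbitrary example tensor.
def _marginal_entropy_demo():
    x = torch.randint(0, 256, (1000,)).double()          # example intensity values
    h = torch.histc(x, bins=256, min=0, max=255) / x.numel()
    h = h[h > 0.000000000000001]                          # drop empty bins, as above
    return -torch.sum(h * torch.log2(h))                  # Shannon entropy in bits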
def precompute_cross_correlation(Ref_uint8_ravel):
    return torch.sum(Ref_uint8_ravel * Ref_uint8_ravel)


def cross_correlation(Ref_uint8_ravel, Flt_uint8_ravel, cc_ref):
    cc_ref_flt = torch.sum(Ref_uint8_ravel * Flt_uint8_ravel)
    cc_flt = torch.sum(Flt_uint8_ravel * Flt_uint8_ravel)
    return - cc_ref_flt / torch.sqrt(cc_ref * cc_flt)


def precompute_mean_squared_error(Ref_uint8_ravel):
    pass


def mean_squared_error(Ref_uint8_ravel, Flt_uint8_ravel, mse_ref):
    return torch.sum((Ref_uint8_ravel - Flt_uint8_ravel)**2)


def compute_mi(ref_img, flt_imgs, t_mats, eref):
    flt_warped = batch_transform(flt_imgs, t_mats)
    #flt_img = transform(flt_img, t_mat)
    mi_a = mutual_information(ref_img, flt_warped[0].ravel(), eref)
    mi_b = mutual_information(ref_img, flt_warped[1].ravel(), eref)
    return torch.exp(-mi_a).cpu(), torch.exp(-mi_b).cpu()


def compute_cc(ref_img, flt_imgs, t_mats, cc_ref):
    flt_warped = batch_transform(flt_imgs, t_mats)
    cc_a = cross_correlation(ref_img, flt_warped[0].ravel(), cc_ref)
    cc_b = cross_correlation(ref_img, flt_warped[1].ravel(), cc_ref)
    return cc_a.cpu(), cc_b.cpu()


def compute_mse(ref_img, flt_imgs, t_mats, mse_ref):
    flt_warped = batch_transform(flt_imgs, t_mats)
    mse_a = mean_squared_error(ref_img, flt_warped[0].ravel(), mse_ref)
    mse_b = mean_squared_error(ref_img, flt_warped[1].ravel(), mse_ref)
    return mse_a.cpu(), mse_b.cpu()
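# --- Illustrative sketch (not part of the original script) -------------------
# compute_mi/compute_cc/compute_mse receive two candidate matrices at a time
# (the two golden-section probes) and warp both copies of the floating image in
# a single batched kornia call. The shapes and values below are made-up
# examples.
def _batched_warp_demo():
    img = torch.zeros((1, 64, 64))
    img[0, 20:40, 20:40] = 1.0                  # a simple square to move around
    stack = torch.stack((img, img))             # (2, 1, 64, 64): one copy per candidate
    mats = torch.zeros((2, 2, 3))
    mats[:, 0, 0] = 1.0
    mats[:, 1, 1] = 1.0                         # identity rotations
    mats[0, 0, 2] = 5.0                         # candidate A: +5 px along x
    mats[1, 0, 2] = -5.0                        # candidate B: -5 px along x
    return batch_transform(stack, mats)         # (2, 1, 64, 64) warped pair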
def optimize_goldsearch(par, rng, ref_sup_ravel, flt_stack, linear_par, i, eref):
    start = par - 0.382 * rng
    end = par + 0.618 * rng
    c = (end - (end - start) / 1.618)
    d = (start + (end - start) / 1.618)
    best_mi = 0.0
    while (math.fabs(c - d) > 0.005):
        linear_par[i] = c
        a_mat = to_matrix_blocked(linear_par)
        linear_par[i] = d
        b_mat = to_matrix_blocked(linear_par)
        mats = move_data(torch.stack((a_mat, b_mat)))
        mi_a, mi_b = compute_metric(ref_sup_ravel, flt_stack, mats, eref)
        if (mi_a < mi_b):
            end = d
            best_mi = mi_a
            linear_par[i] = c
        else:
            start = c
            best_mi = mi_b
            linear_par[i] = d
        c = (end - (end - start) / 1.618)
        d = (start + (end - start) / 1.618)
    return (end + start) / 2, best_mi


def optimize_powell(rng, par_lin, ref_sup_ravel, flt_stack, eref):
    converged = False
    eps = 0.000005
    last_mut = 100000.0
    it = 0
    while (not converged):
        converged = True
        it = it + 1
        for i in range(par_lin.numel()):
            cur_par = par_lin[i]
            cur_rng = rng[i]
            param_opt, cur_mi = optimize_goldsearch(cur_par, cur_rng, ref_sup_ravel, flt_stack, par_lin, i, eref)
            par_lin[i] = cur_par
            if last_mut - cur_mi > eps:
                par_lin[i] = param_opt
                last_mut = cur_mi
                converged = False
            else:
                par_lin[i] = cur_par
    #print("Iterations "+str(it))
    return (par_lin)


def register_images(Ref_uint8, Flt_uint8):
    params = torch.empty((2, 3), device=device)
    estimate_initial(Ref_uint8, Flt_uint8, params)
    params_cpu = params.cpu()
    rng = torch.tensor([80.0, 80.0, 1.0])
    pa = torch.tensor([params_cpu[0][2], params_cpu[1][2], params_cpu[0][0]])
    Ref_uint8_ravel = Ref_uint8.ravel().double()
    eref = precompute_metric(Ref_uint8_ravel)
    flt_u = torch.unsqueeze(Flt_uint8, dim=0).float()
    flt_stack = torch.stack((flt_u, flt_u))
    optimal_params = optimize_powell(rng, pa, Ref_uint8_ravel, flt_stack, eref)
    params_trans = to_matrix_blocked(optimal_params)
    flt_transform = transform(Flt_uint8, move_data(params_trans))
    return (flt_transform)


def save_data(OUT_STAK, name, res_path):
    for i in range(len(OUT_STAK)):
        b = name[i].split('/')
        c = b.pop()
        d = c.split('.')
        cv2.imwrite(os.path.join(res_path, d[0][0:2] + str(int(d[0][2:5])) + '.png'), kornia.tensor_to_image(OUT_STAK[i].cpu().byte()))
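# --- Illustrative sketch (not part of the original script) -------------------
# The helper below replays the golden-section bracketing used by
# optimize_goldsearch on a plain 1D function, so the 0.382/0.618 interval
# updates above are easier to follow. The test function, interval and tolerance
# are made-up example values.
def _golden_section_demo(f=lambda x: (x - 3.0) ** 2, start=-10.0, end=10.0):
    c = end - (end - start) / 1.618
    d = start + (end - start) / 1.618
    while math.fabs(c - d) > 0.005:
        if f(c) < f(d):
            end = d       # the minimum lies in [start, d]
        else:
            start = c     # the minimum lies in [c, end]
        c = end - (end - start) / 1.618
        d = start + (end - start) / 1.618
    return (end + start) / 2  # converges close to x = 3.0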
# Create folders
def compute(CT, PET, name, curr_res, t_id, patient_id):
    final_img = []
    times = []
    t = 0.0
    it_time = 0.0
    hist_dim = 256
    dim = 512
    global ref_vals
    ref_vals = torch.ones(dim * dim, dtype=torch.int, device=device)
    global move_data
    move_data = no_transfer if device == "cpu" else to_cuda
    for c, ij in enumerate(zip(CT, PET)):
        i = ij[0]
        j = ij[1]
        ref = pydicom.dcmread(i)
        Ref_img = torch.tensor(ref.pixel_array.astype(np.int16), dtype=torch.int16, device=device)
        Ref_img[Ref_img == -2000] = 1
        flt = pydicom.dcmread(j)
        Flt_img = torch.tensor(flt.pixel_array.astype(np.int16), dtype=torch.int16, device=device)
        Ref_img = (Ref_img - Ref_img.min()) / (Ref_img.max() - Ref_img.min()) * 255
        Ref_uint8 = Ref_img.round().type(torch.uint8)
        Flt_img = (Flt_img - Flt_img.min()) / (Flt_img.max() - Flt_img.min()) * 255
        Flt_uint8 = Flt_img.round().type(torch.uint8)
        start_time = time.time()
        f_img = register_images(Ref_uint8, Flt_uint8)
        end_time = time.time()
        final_img.append(f_img.cpu())
        it_time = (end_time - start_time)
        times.append(it_time)
        t = t + it_time
    df = pd.DataFrame([t, np.mean(times), np.std(times)], columns=['Test' + str(patient_id)])  # +str(config)accel_id.get_config())])
    times_df = pd.DataFrame(times, columns=['Test' + str(patient_id)])  # +str(config)accel_id.get_config())])
    df_path = os.path.join(curr_res, 'Time_powll_%02d.csv' % (t_id))
    times_df_path = os.path.join(curr_res, 'Img_powll_%02d.csv' % (t_id))
    df.to_csv(df_path, index=False)
    times_df.to_csv(times_df_path, index=False)
    save_data(final_img, PET, curr_res)


def compute_wrapper(args, num_threads=1):
    config = args.config
    for k in range(args.offset, args.patient):
        pool = []
        curr_prefix = args.prefix + str(k)
        curr_ct = os.path.join(curr_prefix, args.ct_path)
        curr_pet = os.path.join(curr_prefix, args.pet_path)
        curr_res = os.path.join("", args.res_path)
        os.makedirs(curr_res, exist_ok=True)
        CT = glob.glob(curr_ct + '/*dcm')
        PET = glob.glob(curr_pet + '/*dcm')
        PET.sort()
        CT.sort()
        assert len(CT) == len(PET)
        images_per_thread = len(CT) // num_threads
        print(images_per_thread)
        for i in range(num_threads):
            start = images_per_thread * i
            end = images_per_thread * (i + 1) if i < num_threads - 1 else len(CT)
            name = "t%02d" % (i)
            pool.append(Process(target=compute, args=(CT[start:end], PET[start:end], name, curr_res, i, k)))
        for t in pool:
            t.start()
        for t in pool:
            t.join()


hist_dim = 256
dim = 512


def main():
    parser = argparse.ArgumentParser(description='Iron software for IR onto a python env')
    parser.add_argument("-pt", "--patient", nargs='?', help='Number of the patient to analyze', default=1, type=int)
    parser.add_argument("-o", "--offset", nargs='?', help='Starting patient to analyze', default=0, type=int)
    parser.add_argument("-cp", "--ct_path", nargs='?', help='Path of the CT Images', default='./')
    parser.add_argument("-pp", "--pet_path", nargs='?', help='Path of the PET Images', default='./')
    parser.add_argument("-rp", "--res_path", nargs='?', help='Path of the Results', default='./')
    parser.add_argument("-t", "--thread_number", nargs='?', help='Number of // threads', default=1, type=int)
    parser.add_argument("-px", "--prefix", nargs='?', help='prefix Path of patients folder', default='./')
    parser.add_argument("-im", "--image_dimension", nargs='?', help='Target images dimensions', default=512, type=int)
    parser.add_argument("-c", "--config", nargs='?', help='prefix Path of patients folder', default='./')
    parser.add_argument("-mtr", "--metric", nargs='?', help='Metric accelerator to be tested', choices=['MI', 'CC', 'MSE'], default='MI')
    parser.add_argument("-dvc", "--device", nargs='?', help='Target device', choices=['cpu', 'cuda'], default='cpu')
    args = parser.parse_args()
    num_threads = args.thread_number
    patient_number = args.patient
    print(args.config)
    print(args)
    global compute_metric, precompute_metric
    if args.metric == "MI":
        compute_metric = compute_mi
        precompute_metric = precompute_mutual_information
    elif args.metric == "CC":
        compute_metric = compute_cc
        precompute_metric = precompute_cross_correlation
    elif args.metric == "MSE":
        compute_metric = compute_mse
        precompute_metric = precompute_mean_squared_error
    else:
        print("Unsupported metric!")
        exit()
    global device
    device = args.device
    compute_wrapper(args, num_threads)
    print("Faber Powell python is at the end :)")


if __name__ == "__main__":
    main()
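# --- Example invocation (not from the original repository) -------------------
# The script file name below is a placeholder; the flags match the argparse
# options defined in main(). For instance, registering patients 0 and 1 with
# the mutual-information metric on the CPU, using 4 worker processes per
# patient:
#
#   python3 powell_registration.py -px ./Patient -o 0 -pt 2 \
#       -cp CT -pp PET -rp ./results -t 4 -mtr MI -dvc cpu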
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# author: houzhiwei
# time: 2018/7/11 16:37
# https://blog.csdn.net/headwind_/article/details/70234169
from pprint import pprint
import json
from rdflib import BNode, Literal, Graph, Namespace, ConjunctiveGraph, URIRef, resource
from rdflib.namespace import SKOS, DCTERMS, RDF, RDFS, OWL

ont_uri = 'http://www.egc.org/ont/model/geospatial'
ns = Namespace(ont_uri + '#')
g = ConjunctiveGraph(identifier=ont_uri)
g.bind("skos", SKOS)
g.bind("dcterms", DCTERMS)
g.bind("model", ns)

ont_cls = ['Contact', 'TechnicalSpecs', 'InputOutput', 'Input', 'Output', 'Process', 'Testing', 'Other', 'Component', 'Publication']
keys = ['contact', 'technical', 'io', 'input', 'output', 'process', 'testing', 'other', 'component', 'publications']

for cls in ont_cls:
    g.add((ns[cls], RDF.type, OWL.Class))
    g.add((ns[cls], RDFS.subClassOf, OWL.Thing))

# create the top-level class
model_cls = ns['GeoSpatialModel']
g.add((model_cls, RDF.type, OWL.Class))
g.add((model_cls, RDFS.subClassOf, OWL.Thing))

count = 0
with open('../CSDMS/csdms3.json', 'r') as f:
    data = json.load(f)  # list
    # pprint(data[0]['component'])
    for model_dict in data:
        # model individual
        model_name = model_dict['model_name'].replace(' ', '_')
        sub = ns[model_name]
        g.add((sub, RDF.type, model_cls))
        g.add((sub, SKOS.prefLabel, Literal(model_dict['model_name'], lang='en')))
        for k, v in model_dict.items():
            # normalise the key before using it as a property name
            k = k.replace(' ', '_')
            pred = ns[k]
            if k in keys:
                i = keys.index(k)
            else:
                continue
            if type(v) == str:
                g.add((pred, RDF.type, OWL.DatatypeProperty))
                g.add((sub, pred, Literal(v)))
            elif type(v) == dict:
                v_node = BNode()
                g.add((v_node, RDF.type, ns[ont_cls[i]]))
                g.add((pred, RDF.type, OWL.ObjectProperty))
                g.add((sub, pred, v_node))
                # the csdms json is at most two levels deep
                for vk, vv in v.items():
                    v_pred = ns[vk]
                    g.add((v_pred, RDF.type, OWL.DatatypeProperty))
                    g.add((v_node, v_pred, Literal(vv)))
            elif type(v) == list and len(v) > 0:
                for l in v:
                    l_node = BNode()
                    g.add((l_node, RDF.type, ns[ont_cls[i]]))
                    g.add((sub, pred, l_node))
                    for lk, lv in l.items():
                        l_pred = ns[lk]
                        g.add((l_pred, RDF.type, OWL.DatatypeProperty))
                        g.add((l_node, l_pred, Literal(lv)))

# pprint(g.serialize(format='application/rdf+xml'))
g.serialize(destination='csdms_test.owl')
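The pattern above maps one JSON record to a named individual plus one blank node per nested object. A small hedged illustration of the same triple pattern follows; the record dict is invented for the example and is not part of the CSDMS dump.

from rdflib import BNode, ConjunctiveGraph, Literal, Namespace
from rdflib.namespace import OWL, RDF, SKOS

ns = Namespace('http://www.egc.org/ont/model/geospatial#')
g = ConjunctiveGraph(identifier='http://www.egc.org/ont/model/geospatial')
g.bind('skos', SKOS)
g.bind('model', ns)

# Hypothetical record, shaped like one entry of the CSDMS JSON.
record = {'model_name': 'Demo Model', 'contact': {'name': 'Jane Doe', 'email': 'jane@example.org'}}

sub = ns[record['model_name'].replace(' ', '_')]
g.add((sub, RDF.type, ns['GeoSpatialModel']))
g.add((sub, SKOS.prefLabel, Literal(record['model_name'], lang='en')))

contact = BNode()
g.add((contact, RDF.type, ns['Contact']))
g.add((sub, ns['contact'], contact))
for key, value in record['contact'].items():
    g.add((ns[key], RDF.type, OWL.DatatypeProperty))
    g.add((contact, ns[key], Literal(value)))

print(g.serialize(format='turtle'))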
import unittest
import numpy as np
from src.square_matrix_multiply import square_matrix_multiply


class TestStrassenMultiply(unittest.TestCase):
    def test_square_1(self):
        matrix_a = np.array([[1, 3], [7, 5]])
        matrix_b = np.array([[6, 8], [4, 2]])
        expected = np.array([[18, 14], [62, 66]])
        self.assertTrue(bool((square_matrix_multiply(matrix_a, matrix_b) == expected).all()))
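The module src.square_matrix_multiply is not included in this dump, so the sketch below is only a stand-in that would satisfy the test: a plain cubic multiply (the test-class name suggests the real module implements Strassen's recursive scheme instead).

import numpy as np

def square_matrix_multiply(a, b):
    # Naive O(n^3) reference multiply for square matrices of equal size.
    a = np.asarray(a)
    b = np.asarray(b)
    n = a.shape[0]
    result = np.zeros((n, n), dtype=a.dtype)
    for i in range(n):
        for j in range(n):
            for k in range(n):
                result[i][j] += a[i][k] * b[k][j]
    return result

assert (square_matrix_multiply([[1, 3], [7, 5]], [[6, 8], [4, 2]]) == [[18, 14], [62, 66]]).all()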
from setuptools import setup

setup(
    name = 'azdevman',
    version = '0.0.1',
    packages = ['azdevman'],
    entry_points = {
        'console_scripts': [
            'azdevman = azdevman.main:cli'
        ]
    }
)
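With this setup.py, installing the package (for example with pip install -e . from the project root) registers an azdevman console command that dispatches to the cli callable in azdevman.main.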
# -*- coding: utf-8 -*-
"""Database utilities.

@author: zhoujiagen
Created on 03/11/2018 10:02 AM
"""
import pymysql


def connect_mysql(host='127.0.0.1', port=3306, user='root', password='<PASSWORD>', database='pci', charset='utf8'):
    """Get a MySQL connection.

    :param host:
    :param port:
    :param user:
    :param password:
    :param database:
    :param charset:
    :return:
    """
    return pymysql.connect(host=host, port=port, user=user, password=password, database=database, charset=charset)
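A hedged usage sketch of the helper above: it assumes a reachable MySQL server, placeholder credentials, and a hypothetical table named items.

conn = connect_mysql(host='127.0.0.1', user='root', password='secret', database='pci')
try:
    with conn.cursor() as cursor:
        cursor.execute('SELECT COUNT(*) FROM items')
        print(cursor.fetchone())
finally:
    conn.close()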
import json
import math
import logging
from pprint import pprint  # noqa

from flask import Blueprint, request
from werkzeug.exceptions import BadRequest
from followthemoney import model
from followthemoney.compare import compare

from aleph.core import settings, url_for
from aleph.model import Entity
from aleph.search import SearchQueryParser
from aleph.search import EntitiesQuery, MatchQuery
from aleph.views.util import jsonify
from aleph.logic.util import entity_url
from aleph.index.util import unpack_result

# See: https://github.com/OpenRefine/OpenRefine/wiki/Reconciliation-Service-API
blueprint = Blueprint('reconcile_api', __name__)
log = logging.getLogger(__name__)


def get_freebase_types():
    types = []
    for schema in model:
        if schema.matchable:
            types.append({'id': schema.name, 'name': schema.label})
    return types


def reconcile_op(query):
    """Reconcile operation for a single query."""
    parser = SearchQueryParser({
        'limit': query.get('limit', '5'),
        'strict': 'false'
    }, request.authz)
    name = query.get('query', '')
    schema = query.get('type') or Entity.THING
    proxy = model.make_entity(schema)
    proxy.add('name', query.get('query', ''))
    for p in query.get('properties', []):
        proxy.add(p.get('pid'), p.get('v'), quiet=True)

    query = MatchQuery(parser, entity=proxy)
    matches = []
    for doc in query.search().get('hits').get('hits'):
        entity = unpack_result(doc)
        if entity is None:
            continue
        entity = model.get_proxy(entity)
        score = math.ceil(compare(model, proxy, entity) * 100)
        match = {
            'id': entity.id,
            'name': entity.caption,
            'score': score,
            'uri': entity_url(entity.id),
            'match': False
        }
        for type_ in get_freebase_types():
            if entity.schema.name == type_['id']:
                match['type'] = [type_]
        matches.append(match)
    log.info("Reconciled: %r -> %d matches", name, len(matches))
    return {
        'result': matches,
        'num': len(matches)
    }


def reconcile_index():
    domain = settings.APP_UI_URL.strip('/')
    meta = {
        'name': settings.APP_TITLE,
        'identifierSpace': 'http://rdf.freebase.com/ns/type.object.id',
        'schemaSpace': 'http://rdf.freebase.com/ns/type.object.id',
        'view': {'url': entity_url('{{id}}')},
        'preview': {'url': entity_url('{{id}}'), 'width': 800, 'height': 400},
        'suggest': {
            'entity': {
                'service_url': domain,
                'service_path': url_for('reconcile_api.suggest_entity', _authorize=True)
            },
            'type': {
                'service_url': domain,
                'service_path': url_for('reconcile_api.suggest_type')
            },
            'property': {
                'service_url': domain,
                'service_path': url_for('reconcile_api.suggest_property')
            }
        },
        'defaultTypes': [{
            'id': Entity.THING,
            'name': model.get(Entity.THING).label
        }]
    }
    return jsonify(meta)


@blueprint.route('/api/freebase/reconcile', methods=['GET', 'POST'])
def reconcile():
    """
    Reconciliation API, emulates Google Refine API.

    See: http://code.google.com/p/google-refine/wiki/ReconciliationServiceApi
    """
    if 'query' in request.values:
        # single
        q = request.values.get('query')
        if q.startswith('{'):
            try:
                q = json.loads(q)
            except ValueError:
                raise BadRequest()
        else:
            q = request.values
        return jsonify(reconcile_op(q))
    elif 'queries' in request.values:
        # multiple requests in one query
        qs = request.values.get('queries')
        try:
            qs = json.loads(qs)
        except ValueError:
            raise BadRequest()
        queries = {}
        for k, q in qs.items():
            queries[k] = reconcile_op(q)
        return jsonify(queries)
    else:
        return reconcile_index()


@blueprint.route('/api/freebase/suggest', methods=['GET', 'POST'])
def suggest_entity():
    """Suggest API, emulates Google Refine API."""
    args = {
        'prefix': request.args.get('prefix'),
        'filter:schemata': request.args.getlist('type')
    }
    matches = []
    parser = SearchQueryParser(args, request.authz)
    if parser.prefix is not None:
        query = EntitiesQuery(parser)
        for doc in query.search().get('hits').get('hits'):
            source = doc.get('_source')
            match = {
                'quid': doc.get('_id'),
                'id': doc.get('_id'),
                'name': source.get('name'),
                'r:score': doc.get('_score'),
            }
            for type_ in get_freebase_types():
                if source.get('schema') == type_['id']:
                    match['n:type'] = type_
                    match['type'] = [type_['name']]
            matches.append(match)
    return jsonify({
        "code": "/api/status/ok",
        "status": "200 OK",
        "prefix": request.args.get('prefix', ''),
        "result": matches
    })


@blueprint.route('/api/freebase/property', methods=['GET', 'POST'])
def suggest_property():
    prefix = request.args.get('prefix', '').lower().strip()
    matches = []
    for prop in model.properties:
        match = not len(prefix)
        if not match:
            match = prefix in prop.name.lower()
            match = match or prefix in prop.label.lower()
        if match:
            matches.append({
                'id': prop.name,
                'quid': prop.name,
                'name': prop.label,
                'r:score': 100,
                'n:type': {'id': '/properties/property', 'name': 'Property'}
            })
    return jsonify({
        "code": "/api/status/ok",
        "status": "200 OK",
        "prefix": request.args.get('prefix', ''),
        "result": matches
    })


@blueprint.route('/api/freebase/type', methods=['GET', 'POST'])
def suggest_type():
    prefix = request.args.get('prefix', '').lower().strip()
    matches = []
    for type_ in get_freebase_types():
        name = type_.get('name').lower()
        if not len(prefix) or prefix in name:
            matches.append(type_)
    return jsonify({
        "code": "/api/status/ok",
        "status": "200 OK",
        "prefix": request.args.get('prefix', ''),
        "result": matches
    })
See: http://code.google.com/p/google-refine/wiki/ReconciliationServiceApi \"\"\" if 'query' in", "doc.get('_score'), } for type_ in get_freebase_types(): if source.get('schema') == type_['id']: match['n:type'] = type_", "get_freebase_types(): if source.get('schema') == type_['id']: match['n:type'] = type_ match['type'] = [type_['name']] matches.append(match) return", "API, emulates Google Refine API.\"\"\" args = { 'prefix': request.args.get('prefix'), 'filter:schemata': request.args.getlist('type') }", "See: http://code.google.com/p/google-refine/wiki/ReconciliationServiceApi \"\"\" if 'query' in request.values: # single q = request.values.get('query') if", "== type_['id']: match['n:type'] = type_ match['type'] = [type_['name']] matches.append(match) return jsonify({ \"code\": \"/api/status/ok\",", "type_ in get_freebase_types(): if source.get('schema') == type_['id']: match['n:type'] = type_ match['type'] = [type_['name']]", "}, request.authz) name = query.get('query', '') schema = query.get('type') or Entity.THING proxy =", "}] } return jsonify(meta) @blueprint.route('/api/freebase/reconcile', methods=['GET', 'POST']) def reconcile(): \"\"\" Reconciliation API, emulates", "logging from pprint import pprint # noqa from flask import Blueprint, request from", "from aleph.model import Entity from aleph.search import SearchQueryParser from aleph.search import EntitiesQuery, MatchQuery", "# multiple requests in one query qs = request.values.get('queries') try: qs = json.loads(qs)", "one query qs = request.values.get('queries') try: qs = json.loads(qs) except ValueError: raise BadRequest()", "doc in query.search().get('hits').get('hits'): source = doc.get('_source') match = { 'quid': doc.get('_id'), 'id': doc.get('_id'),", "prop.label, 'r:score': 100, 'n:type': { 'id': '/properties/property', 'name': 'Property' } }) return jsonify({", "See: https://github.com/OpenRefine/OpenRefine/wiki/Reconciliation-Service-API blueprint = Blueprint('reconcile_api', __name__) log = logging.getLogger(__name__) def get_freebase_types(): types =", "'http://rdf.freebase.com/ns/type.object.id', 'view': { 'url': entity_url('{{id}}') }, 'preview': { 'url': entity_url('{{id}}'), 'width': 800, 'height':", "request.values return jsonify(reconcile_op(q)) elif 'queries' in request.values: # multiple requests in one query", "'false' }, request.authz) name = query.get('query', '') schema = query.get('type') or Entity.THING proxy", "if match: matches.append({ 'id': prop.name, 'quid': prop.name, 'name': prop.label, 'r:score': 100, 'n:type': {", "entity.schema.name == type_['id']: match['type'] = [type_] matches.append(match) log.info(\"Reconciled: %r -> %d matches\", name,", "jsonify(meta) @blueprint.route('/api/freebase/reconcile', methods=['GET', 'POST']) def reconcile(): \"\"\" Reconciliation API, emulates Google Refine API.", "single q = request.values.get('query') if q.startswith('{'): try: q = json.loads(q) except ValueError: raise", "'score': score, 'uri': entity_url(entity.id), 'match': False } for type_ in get_freebase_types(): if entity.schema.name", "request.args.get('prefix'), 'filter:schemata': request.args.getlist('type') } matches = [] parser = SearchQueryParser(args, request.authz) if parser.prefix", "matches.append(type_) return jsonify({ \"code\": \"/api/status/ok\", \"status\": \"200 OK\", \"prefix\": request.args.get('prefix', ''), \"result\": matches", "'prefix': request.args.get('prefix'), 'filter:schemata': request.args.getlist('type') } matches = [] parser = SearchQueryParser(args, request.authz) if", 
"import entity_url from aleph.index.util import unpack_result # See: https://github.com/OpenRefine/OpenRefine/wiki/Reconciliation-Service-API blueprint = Blueprint('reconcile_api', __name__)", "'id': doc.get('_id'), 'name': source.get('name'), 'r:score': doc.get('_score'), } for type_ in get_freebase_types(): if source.get('schema')", "= query.get('query', '') schema = query.get('type') or Entity.THING proxy = model.make_entity(schema) proxy.add('name', query.get('query',", "proxy.add(p.get('pid'), p.get('v'), quiet=True) query = MatchQuery(parser, entity=proxy) matches = [] for doc in", "if not match: match = prefix in prop.name.lower() match = match or prefix", "Google Refine API.\"\"\" args = { 'prefix': request.args.get('prefix'), 'filter:schemata': request.args.getlist('type') } matches =", "prop in model.properties: match = not len(prefix) if not match: match = prefix", "# single q = request.values.get('query') if q.startswith('{'): try: q = json.loads(q) except ValueError:", "if entity.schema.name == type_['id']: match['type'] = [type_] matches.append(match) log.info(\"Reconciled: %r -> %d matches\",", "def suggest_property(): prefix = request.args.get('prefix', '').lower().strip() matches = [] for prop in model.properties:", "type_['id']: match['type'] = [type_] matches.append(match) log.info(\"Reconciled: %r -> %d matches\", name, len(matches)) return", "name: matches.append(type_) return jsonify({ \"code\": \"/api/status/ok\", \"status\": \"200 OK\", \"prefix\": request.args.get('prefix', ''), \"result\":", "\"\"\" if 'query' in request.values: # single q = request.values.get('query') if q.startswith('{'): try:", "from aleph.logic.util import entity_url from aleph.index.util import unpack_result # See: https://github.com/OpenRefine/OpenRefine/wiki/Reconciliation-Service-API blueprint =", "request.authz) if parser.prefix is not None: query = EntitiesQuery(parser) for doc in query.search().get('hits').get('hits'):", "prefix in prop.name.lower() match = match or prefix in prop.label.lower() if match: matches.append({", "{ 'id': entity.id, 'name': entity.caption, 'score': score, 'uri': entity_url(entity.id), 'match': False } for", "def reconcile_index(): domain = settings.APP_UI_URL.strip('/') meta = { 'name': settings.APP_TITLE, 'identifierSpace': 'http://rdf.freebase.com/ns/type.object.id', 'schemaSpace':", "EntitiesQuery(parser) for doc in query.search().get('hits').get('hits'): source = doc.get('_source') match = { 'quid': doc.get('_id'),", "import compare from aleph.core import settings, url_for from aleph.model import Entity from aleph.search", "entity.id, 'name': entity.caption, 'score': score, 'uri': entity_url(entity.id), 'match': False } for type_ in", "math.ceil(compare(model, proxy, entity) * 100) match = { 'id': entity.id, 'name': entity.caption, 'score':", "json.loads(qs) except ValueError: raise BadRequest() queries = {} for k, q in qs.items():", "= settings.APP_UI_URL.strip('/') meta = { 'name': settings.APP_TITLE, 'identifierSpace': 'http://rdf.freebase.com/ns/type.object.id', 'schemaSpace': 'http://rdf.freebase.com/ns/type.object.id', 'view': {", "'preview': { 'url': entity_url('{{id}}'), 'width': 800, 'height': 400 }, 'suggest': { 'entity': {", "# noqa from flask import Blueprint, request from werkzeug.exceptions import BadRequest from followthemoney", "request.values: # multiple requests in one query qs = request.values.get('queries') try: qs =", "'result': matches, 'num': len(matches) } def reconcile_index(): domain = settings.APP_UI_URL.strip('/') meta = {", 
"jsonify from aleph.logic.util import entity_url from aleph.index.util import unpack_result # See: https://github.com/OpenRefine/OpenRefine/wiki/Reconciliation-Service-API blueprint", "'defaultTypes': [{ 'id': Entity.THING, 'name': model.get(Entity.THING).label }] } return jsonify(meta) @blueprint.route('/api/freebase/reconcile', methods=['GET', 'POST'])", "multiple requests in one query qs = request.values.get('queries') try: qs = json.loads(qs) except", "matches = [] for type_ in get_freebase_types(): name = type_.get('name').lower() if not len(prefix)", "math import logging from pprint import pprint # noqa from flask import Blueprint,", "[{ 'id': Entity.THING, 'name': model.get(Entity.THING).label }] } return jsonify(meta) @blueprint.route('/api/freebase/reconcile', methods=['GET', 'POST']) def", "domain, 'service_path': url_for('reconcile_api.suggest_entity', _authorize=True) }, 'type': { 'service_url': domain, 'service_path': url_for('reconcile_api.suggest_type') }, 'property':", "\"code\": \"/api/status/ok\", \"status\": \"200 OK\", \"prefix\": request.args.get('prefix', ''), \"result\": matches }) @blueprint.route('/api/freebase/property', methods=['GET',", "for k, q in qs.items(): queries[k] = reconcile_op(q) return jsonify(queries) else: return reconcile_index()", "'schemaSpace': 'http://rdf.freebase.com/ns/type.object.id', 'view': { 'url': entity_url('{{id}}') }, 'preview': { 'url': entity_url('{{id}}'), 'width': 800,", "parser.prefix is not None: query = EntitiesQuery(parser) for doc in query.search().get('hits').get('hits'): source =", "from pprint import pprint # noqa from flask import Blueprint, request from werkzeug.exceptions", "raise BadRequest() else: q = request.values return jsonify(reconcile_op(q)) elif 'queries' in request.values: #", "Entity.THING, 'name': model.get(Entity.THING).label }] } return jsonify(meta) @blueprint.route('/api/freebase/reconcile', methods=['GET', 'POST']) def reconcile(): \"\"\"", "len(matches) } def reconcile_index(): domain = settings.APP_UI_URL.strip('/') meta = { 'name': settings.APP_TITLE, 'identifierSpace':", "reconcile_op(query): \"\"\"Reconcile operation for a single query.\"\"\" parser = SearchQueryParser({ 'limit': query.get('limit', '5'),", "'id': entity.id, 'name': entity.caption, 'score': score, 'uri': entity_url(entity.id), 'match': False } for type_", "= [] for prop in model.properties: match = not len(prefix) if not match:", "[] for type_ in get_freebase_types(): name = type_.get('name').lower() if not len(prefix) or prefix", "= MatchQuery(parser, entity=proxy) matches = [] for doc in query.search().get('hits').get('hits'): entity = unpack_result(doc)", "\"result\": matches }) @blueprint.route('/api/freebase/type', methods=['GET', 'POST']) def suggest_type(): prefix = request.args.get('prefix', '').lower().strip() matches", "source.get('schema') == type_['id']: match['n:type'] = type_ match['type'] = [type_['name']] matches.append(match) return jsonify({ \"code\":", "or prefix in name: matches.append(type_) return jsonify({ \"code\": \"/api/status/ok\", \"status\": \"200 OK\", \"prefix\":", "MatchQuery from aleph.views.util import jsonify from aleph.logic.util import entity_url from aleph.index.util import unpack_result", "model.make_entity(schema) proxy.add('name', query.get('query', '')) for p in query.get('properties', []): proxy.add(p.get('pid'), p.get('v'), quiet=True) query", "emulates Google Refine API. 
See: http://code.google.com/p/google-refine/wiki/ReconciliationServiceApi \"\"\" if 'query' in request.values: # single", "OK\", \"prefix\": request.args.get('prefix', ''), \"result\": matches }) @blueprint.route('/api/freebase/property', methods=['GET', 'POST']) def suggest_property(): prefix", "\"\"\"Suggest API, emulates Google Refine API.\"\"\" args = { 'prefix': request.args.get('prefix'), 'filter:schemata': request.args.getlist('type')", "return jsonify(queries) else: return reconcile_index() @blueprint.route('/api/freebase/suggest', methods=['GET', 'POST']) def suggest_entity(): \"\"\"Suggest API, emulates", "import logging from pprint import pprint # noqa from flask import Blueprint, request", "len(prefix) if not match: match = prefix in prop.name.lower() match = match or", "'service_path': url_for('reconcile_api.suggest_type') }, 'property': { 'service_url': domain, 'service_path': url_for('reconcile_api.suggest_property') } }, 'defaultTypes': [{", "= { 'id': entity.id, 'name': entity.caption, 'score': score, 'uri': entity_url(entity.id), 'match': False }", "raise BadRequest() queries = {} for k, q in qs.items(): queries[k] = reconcile_op(q)", "}, 'property': { 'service_url': domain, 'service_path': url_for('reconcile_api.suggest_property') } }, 'defaultTypes': [{ 'id': Entity.THING,", "'name': schema.label }) return types def reconcile_op(query): \"\"\"Reconcile operation for a single query.\"\"\"", "'filter:schemata': request.args.getlist('type') } matches = [] parser = SearchQueryParser(args, request.authz) if parser.prefix is", "@blueprint.route('/api/freebase/reconcile', methods=['GET', 'POST']) def reconcile(): \"\"\" Reconciliation API, emulates Google Refine API. See:", "EntitiesQuery, MatchQuery from aleph.views.util import jsonify from aleph.logic.util import entity_url from aleph.index.util import", "= doc.get('_source') match = { 'quid': doc.get('_id'), 'id': doc.get('_id'), 'name': source.get('name'), 'r:score': doc.get('_score'),", "entity=proxy) matches = [] for doc in query.search().get('hits').get('hits'): entity = unpack_result(doc) if entity", "schema = query.get('type') or Entity.THING proxy = model.make_entity(schema) proxy.add('name', query.get('query', '')) for p", "prefix in prop.label.lower() if match: matches.append({ 'id': prop.name, 'quid': prop.name, 'name': prop.label, 'r:score':", "'queries' in request.values: # multiple requests in one query qs = request.values.get('queries') try:", "match = prefix in prop.name.lower() match = match or prefix in prop.label.lower() if", "in get_freebase_types(): if entity.schema.name == type_['id']: match['type'] = [type_] matches.append(match) log.info(\"Reconciled: %r ->", "Refine API.\"\"\" args = { 'prefix': request.args.get('prefix'), 'filter:schemata': request.args.getlist('type') } matches = []", "_authorize=True) }, 'type': { 'service_url': domain, 'service_path': url_for('reconcile_api.suggest_type') }, 'property': { 'service_url': domain,", "MatchQuery(parser, entity=proxy) matches = [] for doc in query.search().get('hits').get('hits'): entity = unpack_result(doc) if", "{} for k, q in qs.items(): queries[k] = reconcile_op(q) return jsonify(queries) else: return", "}) @blueprint.route('/api/freebase/property', methods=['GET', 'POST']) def suggest_property(): prefix = request.args.get('prefix', '').lower().strip() matches = []", "from followthemoney.compare import compare from aleph.core import settings, url_for from aleph.model import Entity", "BadRequest() else: q = request.values return 
jsonify(reconcile_op(q)) elif 'queries' in request.values: # multiple", "else: q = request.values return jsonify(reconcile_op(q)) elif 'queries' in request.values: # multiple requests", "None: query = EntitiesQuery(parser) for doc in query.search().get('hits').get('hits'): source = doc.get('_source') match =", "'').lower().strip() matches = [] for type_ in get_freebase_types(): name = type_.get('name').lower() if not", "reconcile_index(): domain = settings.APP_UI_URL.strip('/') meta = { 'name': settings.APP_TITLE, 'identifierSpace': 'http://rdf.freebase.com/ns/type.object.id', 'schemaSpace': 'http://rdf.freebase.com/ns/type.object.id',", "reconcile_index() @blueprint.route('/api/freebase/suggest', methods=['GET', 'POST']) def suggest_entity(): \"\"\"Suggest API, emulates Google Refine API.\"\"\" args", "prefix in name: matches.append(type_) return jsonify({ \"code\": \"/api/status/ok\", \"status\": \"200 OK\", \"prefix\": request.args.get('prefix',", "logging.getLogger(__name__) def get_freebase_types(): types = [] for schema in model: if schema.matchable: types.append({", "[] for schema in model: if schema.matchable: types.append({ 'id': schema.name, 'name': schema.label })", "queries = {} for k, q in qs.items(): queries[k] = reconcile_op(q) return jsonify(queries)", "is None: continue entity = model.get_proxy(entity) score = math.ceil(compare(model, proxy, entity) * 100)", "= model.make_entity(schema) proxy.add('name', query.get('query', '')) for p in query.get('properties', []): proxy.add(p.get('pid'), p.get('v'), quiet=True)", "def get_freebase_types(): types = [] for schema in model: if schema.matchable: types.append({ 'id':", "'height': 400 }, 'suggest': { 'entity': { 'service_url': domain, 'service_path': url_for('reconcile_api.suggest_entity', _authorize=True) },", "import Blueprint, request from werkzeug.exceptions import BadRequest from followthemoney import model from followthemoney.compare", "def reconcile_op(query): \"\"\"Reconcile operation for a single query.\"\"\" parser = SearchQueryParser({ 'limit': query.get('limit',", "match['type'] = [type_['name']] matches.append(match) return jsonify({ \"code\": \"/api/status/ok\", \"status\": \"200 OK\", \"prefix\": request.args.get('prefix',", "doc.get('_id'), 'name': source.get('name'), 'r:score': doc.get('_score'), } for type_ in get_freebase_types(): if source.get('schema') ==", "suggest_property(): prefix = request.args.get('prefix', '').lower().strip() matches = [] for prop in model.properties: match", "import math import logging from pprint import pprint # noqa from flask import", "log.info(\"Reconciled: %r -> %d matches\", name, len(matches)) return { 'result': matches, 'num': len(matches)", "not len(prefix) if not match: match = prefix in prop.name.lower() match = match", "model.properties: match = not len(prefix) if not match: match = prefix in prop.name.lower()", "prop.name, 'name': prop.label, 'r:score': 100, 'n:type': { 'id': '/properties/property', 'name': 'Property' } })", "model: if schema.matchable: types.append({ 'id': schema.name, 'name': schema.label }) return types def reconcile_op(query):", "None: continue entity = model.get_proxy(entity) score = math.ceil(compare(model, proxy, entity) * 100) match", "= [type_['name']] matches.append(match) return jsonify({ \"code\": \"/api/status/ok\", \"status\": \"200 OK\", \"prefix\": request.args.get('prefix', ''),", "url_for from aleph.model import Entity from aleph.search import SearchQueryParser from aleph.search import EntitiesQuery,", "werkzeug.exceptions import 
BadRequest from followthemoney import model from followthemoney.compare import compare from aleph.core", "'name': settings.APP_TITLE, 'identifierSpace': 'http://rdf.freebase.com/ns/type.object.id', 'schemaSpace': 'http://rdf.freebase.com/ns/type.object.id', 'view': { 'url': entity_url('{{id}}') }, 'preview': {", "flask import Blueprint, request from werkzeug.exceptions import BadRequest from followthemoney import model from", "entity) * 100) match = { 'id': entity.id, 'name': entity.caption, 'score': score, 'uri':", "\"status\": \"200 OK\", \"prefix\": request.args.get('prefix', ''), \"result\": matches }) @blueprint.route('/api/freebase/type', methods=['GET', 'POST']) def", "matches.append(match) log.info(\"Reconciled: %r -> %d matches\", name, len(matches)) return { 'result': matches, 'num':", "in one query qs = request.values.get('queries') try: qs = json.loads(qs) except ValueError: raise", "800, 'height': 400 }, 'suggest': { 'entity': { 'service_url': domain, 'service_path': url_for('reconcile_api.suggest_entity', _authorize=True)", "\"200 OK\", \"prefix\": request.args.get('prefix', ''), \"result\": matches }) @blueprint.route('/api/freebase/type', methods=['GET', 'POST']) def suggest_type():", "= not len(prefix) if not match: match = prefix in prop.name.lower() match =", "try: q = json.loads(q) except ValueError: raise BadRequest() else: q = request.values return", "request.values.get('queries') try: qs = json.loads(qs) except ValueError: raise BadRequest() queries = {} for", "'entity': { 'service_url': domain, 'service_path': url_for('reconcile_api.suggest_entity', _authorize=True) }, 'type': { 'service_url': domain, 'service_path':", "entity = model.get_proxy(entity) score = math.ceil(compare(model, proxy, entity) * 100) match = {", "for type_ in get_freebase_types(): if entity.schema.name == type_['id']: match['type'] = [type_] matches.append(match) log.info(\"Reconciled:", "else: return reconcile_index() @blueprint.route('/api/freebase/suggest', methods=['GET', 'POST']) def suggest_entity(): \"\"\"Suggest API, emulates Google Refine", "= [] for type_ in get_freebase_types(): name = type_.get('name').lower() if not len(prefix) or", "# See: https://github.com/OpenRefine/OpenRefine/wiki/Reconciliation-Service-API blueprint = Blueprint('reconcile_api', __name__) log = logging.getLogger(__name__) def get_freebase_types(): types", "'limit': query.get('limit', '5'), 'strict': 'false' }, request.authz) name = query.get('query', '') schema =", "in get_freebase_types(): if source.get('schema') == type_['id']: match['n:type'] = type_ match['type'] = [type_['name']] matches.append(match)", "type_['id']: match['n:type'] = type_ match['type'] = [type_['name']] matches.append(match) return jsonify({ \"code\": \"/api/status/ok\", \"status\":", "prop.name, 'quid': prop.name, 'name': prop.label, 'r:score': 100, 'n:type': { 'id': '/properties/property', 'name': 'Property'", "[type_] matches.append(match) log.info(\"Reconciled: %r -> %d matches\", name, len(matches)) return { 'result': matches,", "schema.label }) return types def reconcile_op(query): \"\"\"Reconcile operation for a single query.\"\"\" parser", "https://github.com/OpenRefine/OpenRefine/wiki/Reconciliation-Service-API blueprint = Blueprint('reconcile_api', __name__) log = logging.getLogger(__name__) def get_freebase_types(): types = []", "__name__) log = logging.getLogger(__name__) def get_freebase_types(): types = [] for schema in model:", "'id': '/properties/property', 'name': 'Property' } }) return jsonify({ \"code\": 
\"/api/status/ok\", \"status\": \"200 OK\",", "import model from followthemoney.compare import compare from aleph.core import settings, url_for from aleph.model", "matches, 'num': len(matches) } def reconcile_index(): domain = settings.APP_UI_URL.strip('/') meta = { 'name':", "p in query.get('properties', []): proxy.add(p.get('pid'), p.get('v'), quiet=True) query = MatchQuery(parser, entity=proxy) matches =", "= request.args.get('prefix', '').lower().strip() matches = [] for prop in model.properties: match = not", "@blueprint.route('/api/freebase/suggest', methods=['GET', 'POST']) def suggest_entity(): \"\"\"Suggest API, emulates Google Refine API.\"\"\" args =", "suggest_entity(): \"\"\"Suggest API, emulates Google Refine API.\"\"\" args = { 'prefix': request.args.get('prefix'), 'filter:schemata':", "ValueError: raise BadRequest() else: q = request.values return jsonify(reconcile_op(q)) elif 'queries' in request.values:", "{ 'prefix': request.args.get('prefix'), 'filter:schemata': request.args.getlist('type') } matches = [] parser = SearchQueryParser(args, request.authz)", "matches.append(match) return jsonify({ \"code\": \"/api/status/ok\", \"status\": \"200 OK\", \"prefix\": request.args.get('prefix', ''), \"result\": matches", "followthemoney import model from followthemoney.compare import compare from aleph.core import settings, url_for from", "= query.get('type') or Entity.THING proxy = model.make_entity(schema) proxy.add('name', query.get('query', '')) for p in", "type_ in get_freebase_types(): if entity.schema.name == type_['id']: match['type'] = [type_] matches.append(match) log.info(\"Reconciled: %r", "100) match = { 'id': entity.id, 'name': entity.caption, 'score': score, 'uri': entity_url(entity.id), 'match':", "match = not len(prefix) if not match: match = prefix in prop.name.lower() match", "\"prefix\": request.args.get('prefix', ''), \"result\": matches }) @blueprint.route('/api/freebase/type', methods=['GET', 'POST']) def suggest_type(): prefix =", "q.startswith('{'): try: q = json.loads(q) except ValueError: raise BadRequest() else: q = request.values", "return jsonify({ \"code\": \"/api/status/ok\", \"status\": \"200 OK\", \"prefix\": request.args.get('prefix', ''), \"result\": matches })", "}) return types def reconcile_op(query): \"\"\"Reconcile operation for a single query.\"\"\" parser =", "import settings, url_for from aleph.model import Entity from aleph.search import SearchQueryParser from aleph.search", "try: qs = json.loads(qs) except ValueError: raise BadRequest() queries = {} for k,", "source.get('name'), 'r:score': doc.get('_score'), } for type_ in get_freebase_types(): if source.get('schema') == type_['id']: match['n:type']", "in get_freebase_types(): name = type_.get('name').lower() if not len(prefix) or prefix in name: matches.append(type_)", "Entity.THING proxy = model.make_entity(schema) proxy.add('name', query.get('query', '')) for p in query.get('properties', []): proxy.add(p.get('pid'),", "emulates Google Refine API.\"\"\" args = { 'prefix': request.args.get('prefix'), 'filter:schemata': request.args.getlist('type') } matches", "in model: if schema.matchable: types.append({ 'id': schema.name, 'name': schema.label }) return types def", "domain, 'service_path': url_for('reconcile_api.suggest_type') }, 'property': { 'service_url': domain, 'service_path': url_for('reconcile_api.suggest_property') } }, 'defaultTypes':", "query = MatchQuery(parser, entity=proxy) matches = [] for doc in query.search().get('hits').get('hits'): entity =", "aleph.model import 
Entity from aleph.search import SearchQueryParser from aleph.search import EntitiesQuery, MatchQuery from", "import Entity from aleph.search import SearchQueryParser from aleph.search import EntitiesQuery, MatchQuery from aleph.views.util", "q in qs.items(): queries[k] = reconcile_op(q) return jsonify(queries) else: return reconcile_index() @blueprint.route('/api/freebase/suggest', methods=['GET',", "prop.name.lower() match = match or prefix in prop.label.lower() if match: matches.append({ 'id': prop.name,", "name = query.get('query', '') schema = query.get('type') or Entity.THING proxy = model.make_entity(schema) proxy.add('name',", "{ 'url': entity_url('{{id}}') }, 'preview': { 'url': entity_url('{{id}}'), 'width': 800, 'height': 400 },", "matches }) @blueprint.route('/api/freebase/property', methods=['GET', 'POST']) def suggest_property(): prefix = request.args.get('prefix', '').lower().strip() matches =", "'url': entity_url('{{id}}'), 'width': 800, 'height': 400 }, 'suggest': { 'entity': { 'service_url': domain,", "single query.\"\"\" parser = SearchQueryParser({ 'limit': query.get('limit', '5'), 'strict': 'false' }, request.authz) name", "{ 'quid': doc.get('_id'), 'id': doc.get('_id'), 'name': source.get('name'), 'r:score': doc.get('_score'), } for type_ in", "in prop.name.lower() match = match or prefix in prop.label.lower() if match: matches.append({ 'id':", "ValueError: raise BadRequest() queries = {} for k, q in qs.items(): queries[k] =", "for p in query.get('properties', []): proxy.add(p.get('pid'), p.get('v'), quiet=True) query = MatchQuery(parser, entity=proxy) matches", "is not None: query = EntitiesQuery(parser) for doc in query.search().get('hits').get('hits'): source = doc.get('_source')", "json.loads(q) except ValueError: raise BadRequest() else: q = request.values return jsonify(reconcile_op(q)) elif 'queries'", "prefix = request.args.get('prefix', '').lower().strip() matches = [] for prop in model.properties: match =", "unpack_result # See: https://github.com/OpenRefine/OpenRefine/wiki/Reconciliation-Service-API blueprint = Blueprint('reconcile_api', __name__) log = logging.getLogger(__name__) def get_freebase_types():", "name = type_.get('name').lower() if not len(prefix) or prefix in name: matches.append(type_) return jsonify({", "} for type_ in get_freebase_types(): if source.get('schema') == type_['id']: match['n:type'] = type_ match['type']", "[] for prop in model.properties: match = not len(prefix) if not match: match", "= match or prefix in prop.label.lower() if match: matches.append({ 'id': prop.name, 'quid': prop.name,", "query.get('query', '') schema = query.get('type') or Entity.THING proxy = model.make_entity(schema) proxy.add('name', query.get('query', ''))", "query.get('limit', '5'), 'strict': 'false' }, request.authz) name = query.get('query', '') schema = query.get('type')", "[] for doc in query.search().get('hits').get('hits'): entity = unpack_result(doc) if entity is None: continue", "url_for('reconcile_api.suggest_property') } }, 'defaultTypes': [{ 'id': Entity.THING, 'name': model.get(Entity.THING).label }] } return jsonify(meta)", "'url': entity_url('{{id}}') }, 'preview': { 'url': entity_url('{{id}}'), 'width': 800, 'height': 400 }, 'suggest':", "\"result\": matches }) @blueprint.route('/api/freebase/property', methods=['GET', 'POST']) def suggest_property(): prefix = request.args.get('prefix', '').lower().strip() matches", "= Blueprint('reconcile_api', __name__) log = logging.getLogger(__name__) def get_freebase_types(): types = [] for schema", 
"Refine API. See: http://code.google.com/p/google-refine/wiki/ReconciliationServiceApi \"\"\" if 'query' in request.values: # single q =", "aleph.core import settings, url_for from aleph.model import Entity from aleph.search import SearchQueryParser from", "'strict': 'false' }, request.authz) name = query.get('query', '') schema = query.get('type') or Entity.THING", "model.get(Entity.THING).label }] } return jsonify(meta) @blueprint.route('/api/freebase/reconcile', methods=['GET', 'POST']) def reconcile(): \"\"\" Reconciliation API,", "if schema.matchable: types.append({ 'id': schema.name, 'name': schema.label }) return types def reconcile_op(query): \"\"\"Reconcile", "qs.items(): queries[k] = reconcile_op(q) return jsonify(queries) else: return reconcile_index() @blueprint.route('/api/freebase/suggest', methods=['GET', 'POST']) def", "aleph.search import EntitiesQuery, MatchQuery from aleph.views.util import jsonify from aleph.logic.util import entity_url from", "in request.values: # single q = request.values.get('query') if q.startswith('{'): try: q = json.loads(q)", "} return jsonify(meta) @blueprint.route('/api/freebase/reconcile', methods=['GET', 'POST']) def reconcile(): \"\"\" Reconciliation API, emulates Google", "= EntitiesQuery(parser) for doc in query.search().get('hits').get('hits'): source = doc.get('_source') match = { 'quid':", "entity_url('{{id}}'), 'width': 800, 'height': 400 }, 'suggest': { 'entity': { 'service_url': domain, 'service_path':", "[] parser = SearchQueryParser(args, request.authz) if parser.prefix is not None: query = EntitiesQuery(parser)", "args = { 'prefix': request.args.get('prefix'), 'filter:schemata': request.args.getlist('type') } matches = [] parser =", "not None: query = EntitiesQuery(parser) for doc in query.search().get('hits').get('hits'): source = doc.get('_source') match", "log = logging.getLogger(__name__) def get_freebase_types(): types = [] for schema in model: if", "model.get_proxy(entity) score = math.ceil(compare(model, proxy, entity) * 100) match = { 'id': entity.id,", "compare from aleph.core import settings, url_for from aleph.model import Entity from aleph.search import", "'5'), 'strict': 'false' }, request.authz) name = query.get('query', '') schema = query.get('type') or", "'suggest': { 'entity': { 'service_url': domain, 'service_path': url_for('reconcile_api.suggest_entity', _authorize=True) }, 'type': { 'service_url':", "from aleph.views.util import jsonify from aleph.logic.util import entity_url from aleph.index.util import unpack_result #", "= json.loads(qs) except ValueError: raise BadRequest() queries = {} for k, q in", "q = request.values return jsonify(reconcile_op(q)) elif 'queries' in request.values: # multiple requests in", "suggest_type(): prefix = request.args.get('prefix', '').lower().strip() matches = [] for type_ in get_freebase_types(): name", "for doc in query.search().get('hits').get('hits'): source = doc.get('_source') match = { 'quid': doc.get('_id'), 'id':", "request.args.get('prefix', '').lower().strip() matches = [] for prop in model.properties: match = not len(prefix)", "query = EntitiesQuery(parser) for doc in query.search().get('hits').get('hits'): source = doc.get('_source') match = {", "if parser.prefix is not None: query = EntitiesQuery(parser) for doc in query.search().get('hits').get('hits'): source", "for type_ in get_freebase_types(): name = type_.get('name').lower() if not len(prefix) or prefix in", "proxy = model.make_entity(schema) proxy.add('name', query.get('query', '')) for p in 
query.get('properties', []): proxy.add(p.get('pid'), p.get('v'),", "queries[k] = reconcile_op(q) return jsonify(queries) else: return reconcile_index() @blueprint.route('/api/freebase/suggest', methods=['GET', 'POST']) def suggest_entity():", "matches = [] parser = SearchQueryParser(args, request.authz) if parser.prefix is not None: query", "match = match or prefix in prop.label.lower() if match: matches.append({ 'id': prop.name, 'quid':", "'') schema = query.get('type') or Entity.THING proxy = model.make_entity(schema) proxy.add('name', query.get('query', '')) for", "reconcile(): \"\"\" Reconciliation API, emulates Google Refine API. See: http://code.google.com/p/google-refine/wiki/ReconciliationServiceApi \"\"\" if 'query'", "type_ match['type'] = [type_['name']] matches.append(match) return jsonify({ \"code\": \"/api/status/ok\", \"status\": \"200 OK\", \"prefix\":", "for a single query.\"\"\" parser = SearchQueryParser({ 'limit': query.get('limit', '5'), 'strict': 'false' },", "from aleph.search import EntitiesQuery, MatchQuery from aleph.views.util import jsonify from aleph.logic.util import entity_url", "'query' in request.values: # single q = request.values.get('query') if q.startswith('{'): try: q =", "types.append({ 'id': schema.name, 'name': schema.label }) return types def reconcile_op(query): \"\"\"Reconcile operation for", "not match: match = prefix in prop.name.lower() match = match or prefix in", "matches = [] for doc in query.search().get('hits').get('hits'): entity = unpack_result(doc) if entity is", "= request.values.get('queries') try: qs = json.loads(qs) except ValueError: raise BadRequest() queries = {}", "100, 'n:type': { 'id': '/properties/property', 'name': 'Property' } }) return jsonify({ \"code\": \"/api/status/ok\",", "doc.get('_source') match = { 'quid': doc.get('_id'), 'id': doc.get('_id'), 'name': source.get('name'), 'r:score': doc.get('_score'), }", "def suggest_type(): prefix = request.args.get('prefix', '').lower().strip() matches = [] for type_ in get_freebase_types():", "if source.get('schema') == type_['id']: match['n:type'] = type_ match['type'] = [type_['name']] matches.append(match) return jsonify({", "'quid': prop.name, 'name': prop.label, 'r:score': 100, 'n:type': { 'id': '/properties/property', 'name': 'Property' }", "'name': source.get('name'), 'r:score': doc.get('_score'), } for type_ in get_freebase_types(): if source.get('schema') == type_['id']:", "prop.label.lower() if match: matches.append({ 'id': prop.name, 'quid': prop.name, 'name': prop.label, 'r:score': 100, 'n:type':", "in qs.items(): queries[k] = reconcile_op(q) return jsonify(queries) else: return reconcile_index() @blueprint.route('/api/freebase/suggest', methods=['GET', 'POST'])", "Google Refine API. 
See: http://code.google.com/p/google-refine/wiki/ReconciliationServiceApi \"\"\" if 'query' in request.values: # single q", "score = math.ceil(compare(model, proxy, entity) * 100) match = { 'id': entity.id, 'name':", "'id': Entity.THING, 'name': model.get(Entity.THING).label }] } return jsonify(meta) @blueprint.route('/api/freebase/reconcile', methods=['GET', 'POST']) def reconcile():", "unpack_result(doc) if entity is None: continue entity = model.get_proxy(entity) score = math.ceil(compare(model, proxy,", "matches.append({ 'id': prop.name, 'quid': prop.name, 'name': prop.label, 'r:score': 100, 'n:type': { 'id': '/properties/property',", "%d matches\", name, len(matches)) return { 'result': matches, 'num': len(matches) } def reconcile_index():", "{ 'result': matches, 'num': len(matches) } def reconcile_index(): domain = settings.APP_UI_URL.strip('/') meta =", "\"status\": \"200 OK\", \"prefix\": request.args.get('prefix', ''), \"result\": matches }) @blueprint.route('/api/freebase/property', methods=['GET', 'POST']) def", "match: matches.append({ 'id': prop.name, 'quid': prop.name, 'name': prop.label, 'r:score': 100, 'n:type': { 'id':", "@blueprint.route('/api/freebase/property', methods=['GET', 'POST']) def suggest_property(): prefix = request.args.get('prefix', '').lower().strip() matches = [] for", "} }, 'defaultTypes': [{ 'id': Entity.THING, 'name': model.get(Entity.THING).label }] } return jsonify(meta) @blueprint.route('/api/freebase/reconcile',", "'POST']) def suggest_entity(): \"\"\"Suggest API, emulates Google Refine API.\"\"\" args = { 'prefix':", "'quid': doc.get('_id'), 'id': doc.get('_id'), 'name': source.get('name'), 'r:score': doc.get('_score'), } for type_ in get_freebase_types():", "schema.matchable: types.append({ 'id': schema.name, 'name': schema.label }) return types def reconcile_op(query): \"\"\"Reconcile operation", "qs = json.loads(qs) except ValueError: raise BadRequest() queries = {} for k, q", "elif 'queries' in request.values: # multiple requests in one query qs = request.values.get('queries')", "return jsonify(reconcile_op(q)) elif 'queries' in request.values: # multiple requests in one query qs", "import BadRequest from followthemoney import model from followthemoney.compare import compare from aleph.core import", "SearchQueryParser({ 'limit': query.get('limit', '5'), 'strict': 'false' }, request.authz) name = query.get('query', '') schema", "}, 'preview': { 'url': entity_url('{{id}}'), 'width': 800, 'height': 400 }, 'suggest': { 'entity':", "= logging.getLogger(__name__) def get_freebase_types(): types = [] for schema in model: if schema.matchable:", "meta = { 'name': settings.APP_TITLE, 'identifierSpace': 'http://rdf.freebase.com/ns/type.object.id', 'schemaSpace': 'http://rdf.freebase.com/ns/type.object.id', 'view': { 'url': entity_url('{{id}}')", "BadRequest from followthemoney import model from followthemoney.compare import compare from aleph.core import settings,", "parser = SearchQueryParser({ 'limit': query.get('limit', '5'), 'strict': 'false' }, request.authz) name = query.get('query',", "Reconciliation API, emulates Google Refine API. 
See: http://code.google.com/p/google-refine/wiki/ReconciliationServiceApi \"\"\" if 'query' in request.values:", "q = request.values.get('query') if q.startswith('{'): try: q = json.loads(q) except ValueError: raise BadRequest()", "Blueprint, request from werkzeug.exceptions import BadRequest from followthemoney import model from followthemoney.compare import", "in name: matches.append(type_) return jsonify({ \"code\": \"/api/status/ok\", \"status\": \"200 OK\", \"prefix\": request.args.get('prefix', ''),", "request.values.get('query') if q.startswith('{'): try: q = json.loads(q) except ValueError: raise BadRequest() else: q", "'view': { 'url': entity_url('{{id}}') }, 'preview': { 'url': entity_url('{{id}}'), 'width': 800, 'height': 400", "entity_url('{{id}}') }, 'preview': { 'url': entity_url('{{id}}'), 'width': 800, 'height': 400 }, 'suggest': {", "API.\"\"\" args = { 'prefix': request.args.get('prefix'), 'filter:schemata': request.args.getlist('type') } matches = [] parser", "= request.values return jsonify(reconcile_op(q)) elif 'queries' in request.values: # multiple requests in one", "\"/api/status/ok\", \"status\": \"200 OK\", \"prefix\": request.args.get('prefix', ''), \"result\": matches }) @blueprint.route('/api/freebase/type', methods=['GET', 'POST'])", "match or prefix in prop.label.lower() if match: matches.append({ 'id': prop.name, 'quid': prop.name, 'name':", "followthemoney.compare import compare from aleph.core import settings, url_for from aleph.model import Entity from", "SearchQueryParser from aleph.search import EntitiesQuery, MatchQuery from aleph.views.util import jsonify from aleph.logic.util import", "= [] parser = SearchQueryParser(args, request.authz) if parser.prefix is not None: query =", "get_freebase_types(): types = [] for schema in model: if schema.matchable: types.append({ 'id': schema.name,", "'service_path': url_for('reconcile_api.suggest_property') } }, 'defaultTypes': [{ 'id': Entity.THING, 'name': model.get(Entity.THING).label }] } return", "get_freebase_types(): name = type_.get('name').lower() if not len(prefix) or prefix in name: matches.append(type_) return" ]
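For context, a client call against the reconciliation blueprint above could look like the sketch below. Only the endpoint path and the payload shape (query, type, limit, and properties with pid/v pairs) come from reconcile() and reconcile_op(); the host, the "Company" schema, the jurisdiction hint, and the lack of authentication are illustrative assumptions, not part of the original file.

# Illustrative OpenRefine-style reconciliation request (sketch; host and values are assumptions).
import json
import requests

ALEPH_URL = "https://aleph.example.org"  # hypothetical Aleph instance
queries = {
    "q0": {
        "query": "Siemens AG",      # free-text name to reconcile
        "type": "Company",          # optional FtM schema; reconcile_op() defaults to Thing
        "limit": "5",
        "properties": [{"pid": "jurisdiction", "v": "de"}],  # extra property hints
    }
}
resp = requests.post(ALEPH_URL + "/api/freebase/reconcile",
                     data={"queries": json.dumps(queries)})
for key, result in resp.json().items():
    for match in result["result"]:
        print(key, match["name"], match["score"], match["uri"])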
[ "= run('echo \"Hello\"') assert return_value == 0 assert output == ['Hello\\n'] def test_bad_run_method():", "assert return_value == 0 assert output == ['Hello\\n'] def test_bad_run_method(): return_value, output =", "return_value == 0 assert output == ['Hello\\n'] def test_bad_run_method(): return_value, output = run('not-a-real-binary')", "assert output == ['Hello\\n'] def test_bad_run_method(): return_value, output = run('not-a-real-binary') assert return_value ==", "return_value, output = run('not-a-real-binary') assert return_value == 127 assert output == ['/bin/sh: 1:", "assert return_value == 127 assert output == ['/bin/sh: 1: not-a-real-binary: not found\\n'] def", "output = run('echo \"Hello\"') assert return_value == 0 assert output == ['Hello\\n'] def", "assert output == ['/bin/sh: 1: not-a-real-binary: not found\\n'] def test_join(): command_elements = ['ls',", "<filename>test/test_shell.py from shell import run, join_command def test_run_method(): return_value, output = run('echo \"Hello\"')", "['Hello\\n'] def test_bad_run_method(): return_value, output = run('not-a-real-binary') assert return_value == 127 assert output", "def test_bad_run_method(): return_value, output = run('not-a-real-binary') assert return_value == 127 assert output ==", "return_value == 127 assert output == ['/bin/sh: 1: not-a-real-binary: not found\\n'] def test_join():", "import run, join_command def test_run_method(): return_value, output = run('echo \"Hello\"') assert return_value ==", "from shell import run, join_command def test_run_method(): return_value, output = run('echo \"Hello\"') assert", "def test_run_method(): return_value, output = run('echo \"Hello\"') assert return_value == 0 assert output", "= run('not-a-real-binary') assert return_value == 127 assert output == ['/bin/sh: 1: not-a-real-binary: not", "run('echo \"Hello\"') assert return_value == 0 assert output == ['Hello\\n'] def test_bad_run_method(): return_value,", "join_command def test_run_method(): return_value, output = run('echo \"Hello\"') assert return_value == 0 assert", "== ['Hello\\n'] def test_bad_run_method(): return_value, output = run('not-a-real-binary') assert return_value == 127 assert", "['/bin/sh: 1: not-a-real-binary: not found\\n'] def test_join(): command_elements = ['ls', '-lah', 'dir_name'] command", "= ['ls', '-lah', 'dir_name'] command = join_command(command_elements) assert command == 'ls -lah dir_name'", "shell import run, join_command def test_run_method(): return_value, output = run('echo \"Hello\"') assert return_value", "command_elements = ['ls', '-lah', 'dir_name'] command = join_command(command_elements) assert command == 'ls -lah", "output == ['/bin/sh: 1: not-a-real-binary: not found\\n'] def test_join(): command_elements = ['ls', '-lah',", "return_value, output = run('echo \"Hello\"') assert return_value == 0 assert output == ['Hello\\n']", "\"Hello\"') assert return_value == 0 assert output == ['Hello\\n'] def test_bad_run_method(): return_value, output", "found\\n'] def test_join(): command_elements = ['ls', '-lah', 'dir_name'] command = join_command(command_elements) assert command", "1: not-a-real-binary: not found\\n'] def test_join(): command_elements = ['ls', '-lah', 'dir_name'] command =", "test_join(): command_elements = ['ls', '-lah', 'dir_name'] command = join_command(command_elements) assert command == 'ls", "== 0 assert output == ['Hello\\n'] def test_bad_run_method(): return_value, output = run('not-a-real-binary') assert", "run('not-a-real-binary') assert return_value == 127 
assert output == ['/bin/sh: 1: not-a-real-binary: not found\\n']", "== ['/bin/sh: 1: not-a-real-binary: not found\\n'] def test_join(): command_elements = ['ls', '-lah', 'dir_name']", "test_run_method(): return_value, output = run('echo \"Hello\"') assert return_value == 0 assert output ==", "test_bad_run_method(): return_value, output = run('not-a-real-binary') assert return_value == 127 assert output == ['/bin/sh:", "output = run('not-a-real-binary') assert return_value == 127 assert output == ['/bin/sh: 1: not-a-real-binary:", "== 127 assert output == ['/bin/sh: 1: not-a-real-binary: not found\\n'] def test_join(): command_elements", "0 assert output == ['Hello\\n'] def test_bad_run_method(): return_value, output = run('not-a-real-binary') assert return_value", "not-a-real-binary: not found\\n'] def test_join(): command_elements = ['ls', '-lah', 'dir_name'] command = join_command(command_elements)", "127 assert output == ['/bin/sh: 1: not-a-real-binary: not found\\n'] def test_join(): command_elements =", "not found\\n'] def test_join(): command_elements = ['ls', '-lah', 'dir_name'] command = join_command(command_elements) assert", "output == ['Hello\\n'] def test_bad_run_method(): return_value, output = run('not-a-real-binary') assert return_value == 127", "def test_join(): command_elements = ['ls', '-lah', 'dir_name'] command = join_command(command_elements) assert command ==", "run, join_command def test_run_method(): return_value, output = run('echo \"Hello\"') assert return_value == 0" ]
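The shell module these tests exercise is not included here. A minimal sketch that would satisfy them, assuming run() executes through /bin/sh and returns an (exit code, list of output lines) tuple with stderr folded into stdout, might look like this; it is not the project's actual implementation.

# shell.py -- hypothetical implementation sketch matching the tests above.
import subprocess


def run(command):
    # Run via the shell so built-ins, quoting and the /bin/sh error text behave as tested.
    proc = subprocess.Popen(command, shell=True,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    stdout, _ = proc.communicate()
    # Keep line endings so output compares equal to e.g. ['Hello\n'].
    output = stdout.decode().splitlines(keepends=True)
    return proc.returncode, output


def join_command(command_elements):
    return ' '.join(command_elements)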
[ "1 while j < k: if nums[i] + nums[j]+nums[k] < target: count +=", "len(nums) - 1 while j < k: if nums[i] + nums[j]+nums[k] < target:", "k=n-1. if nums[i]+nums[j]+nums[k]<target, count+= k-j, j+1; else: k-= 1. class Solution: def threeSumSmaller(self,", "nums[i] + nums[j]+nums[k] < target: count += k-j j += 1 else: k", "from 1 to n-2. Run j=i+1, k=n-1. if nums[i]+nums[j]+nums[k]<target, count+= k-j, j+1; else:", "Smaller.py # Sort nums. Run and fix i from 1 to n-2. Run", "j < k: if nums[i] + nums[j]+nums[k] < target: count += k-j j", "for i in range(len(nums)-2): j, k = i+1, len(nums) - 1 while j", "1. class Solution: def threeSumSmaller(self, nums: List[int], target: int) -> int: nums.sort() count", "i+1, len(nums) - 1 while j < k: if nums[i] + nums[j]+nums[k] <", "i in range(len(nums)-2): j, k = i+1, len(nums) - 1 while j <", "k = i+1, len(nums) - 1 while j < k: if nums[i] +", "fix i from 1 to n-2. Run j=i+1, k=n-1. if nums[i]+nums[j]+nums[k]<target, count+= k-j,", "and fix i from 1 to n-2. Run j=i+1, k=n-1. if nums[i]+nums[j]+nums[k]<target, count+=", "<filename>259. 3Sum Smaller.py # Sort nums. Run and fix i from 1 to", "nums[i]+nums[j]+nums[k]<target, count+= k-j, j+1; else: k-= 1. class Solution: def threeSumSmaller(self, nums: List[int],", "def threeSumSmaller(self, nums: List[int], target: int) -> int: nums.sort() count = 0 for", "List[int], target: int) -> int: nums.sort() count = 0 for i in range(len(nums)-2):", "int) -> int: nums.sort() count = 0 for i in range(len(nums)-2): j, k", "0 for i in range(len(nums)-2): j, k = i+1, len(nums) - 1 while", "j, k = i+1, len(nums) - 1 while j < k: if nums[i]", "k-= 1. class Solution: def threeSumSmaller(self, nums: List[int], target: int) -> int: nums.sort()", "= 0 for i in range(len(nums)-2): j, k = i+1, len(nums) - 1", "+ nums[j]+nums[k] < target: count += k-j j += 1 else: k -=", "< target: count += k-j j += 1 else: k -= 1 return", "3Sum Smaller.py # Sort nums. Run and fix i from 1 to n-2.", "-> int: nums.sort() count = 0 for i in range(len(nums)-2): j, k =", "threeSumSmaller(self, nums: List[int], target: int) -> int: nums.sort() count = 0 for i", "# Sort nums. Run and fix i from 1 to n-2. Run j=i+1,", "Solution: def threeSumSmaller(self, nums: List[int], target: int) -> int: nums.sort() count = 0", "j+1; else: k-= 1. class Solution: def threeSumSmaller(self, nums: List[int], target: int) ->", "i from 1 to n-2. Run j=i+1, k=n-1. if nums[i]+nums[j]+nums[k]<target, count+= k-j, j+1;", "int: nums.sort() count = 0 for i in range(len(nums)-2): j, k = i+1,", "count+= k-j, j+1; else: k-= 1. class Solution: def threeSumSmaller(self, nums: List[int], target:", "- 1 while j < k: if nums[i] + nums[j]+nums[k] < target: count", "if nums[i] + nums[j]+nums[k] < target: count += k-j j += 1 else:", "< k: if nums[i] + nums[j]+nums[k] < target: count += k-j j +=", "else: k-= 1. class Solution: def threeSumSmaller(self, nums: List[int], target: int) -> int:", "class Solution: def threeSumSmaller(self, nums: List[int], target: int) -> int: nums.sort() count =", "Run j=i+1, k=n-1. if nums[i]+nums[j]+nums[k]<target, count+= k-j, j+1; else: k-= 1. class Solution:", "nums: List[int], target: int) -> int: nums.sort() count = 0 for i in", "target: count += k-j j += 1 else: k -= 1 return count", "Sort nums. Run and fix i from 1 to n-2. Run j=i+1, k=n-1.", "target: int) -> int: nums.sort() count = 0 for i in range(len(nums)-2): j,", "to n-2. Run j=i+1, k=n-1. if nums[i]+nums[j]+nums[k]<target, count+= k-j, j+1; else: k-= 1.", "Run and fix i from 1 to n-2. 
Run j=i+1, k=n-1. if nums[i]+nums[j]+nums[k]<target,", "k-j, j+1; else: k-= 1. class Solution: def threeSumSmaller(self, nums: List[int], target: int)", "while j < k: if nums[i] + nums[j]+nums[k] < target: count += k-j", "range(len(nums)-2): j, k = i+1, len(nums) - 1 while j < k: if", "= i+1, len(nums) - 1 while j < k: if nums[i] + nums[j]+nums[k]", "k: if nums[i] + nums[j]+nums[k] < target: count += k-j j += 1", "in range(len(nums)-2): j, k = i+1, len(nums) - 1 while j < k:", "1 to n-2. Run j=i+1, k=n-1. if nums[i]+nums[j]+nums[k]<target, count+= k-j, j+1; else: k-=", "j=i+1, k=n-1. if nums[i]+nums[j]+nums[k]<target, count+= k-j, j+1; else: k-= 1. class Solution: def", "count = 0 for i in range(len(nums)-2): j, k = i+1, len(nums) -", "nums.sort() count = 0 for i in range(len(nums)-2): j, k = i+1, len(nums)", "nums. Run and fix i from 1 to n-2. Run j=i+1, k=n-1. if", "if nums[i]+nums[j]+nums[k]<target, count+= k-j, j+1; else: k-= 1. class Solution: def threeSumSmaller(self, nums:", "nums[j]+nums[k] < target: count += k-j j += 1 else: k -= 1", "n-2. Run j=i+1, k=n-1. if nums[i]+nums[j]+nums[k]<target, count+= k-j, j+1; else: k-= 1. class" ]
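As a quick sanity check of the counting step described in the comment, take nums = [-2, 0, 1, 3] and target = 2: the only triplets with a sum below 2 are (-2, 0, 1) and (-2, 0, 3), so the method should return 2.

# Worked example for the solution above (not part of the original file).
assert Solution().threeSumSmaller([-2, 0, 1, 3], 2) == 2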
[ "== { 'version': '1.0', 'title': 'example' } assert '/multiply' in swagger['paths'] assert '/exception'", "r = test_app.post(\"/complex/3\", data=json.dumps({\"body\": \"1\"}), headers={ \"header\": \"2\", \"content-type\": \"application/json\" }) assert json.loads(r.data.decode())", "} assert '/multiply' in swagger['paths'] assert '/exception' in swagger['paths'] # test blueprint is", "400 resp = json.loads(r.data.decode()) assert resp[\"code\"] == 400 assert resp[\"success\"] is False def", "'1.0', 'title': 'example' } assert '/multiply' in swagger['paths'] assert '/exception' in swagger['paths'] #", "json.loads(r.data.decode()) == 9 def test_headers(test_app): r = test_app.get(\"/api/v1/header\") assert r.headers[\"x-nothing\"] == \"value\" def", "def test_complex(test_app): r = test_app.post(\"/complex/3\", data=json.dumps({\"body\": \"1\"}), headers={ \"header\": \"2\", \"content-type\": \"application/json\" })", "in swagger['paths'] def test_swagger_html(test_app): r = test_app.get(\"/api/\") assert \"/swagger.json\" in r.data.decode() assert r.status_code", "'title': 'example' } assert '/multiply' in swagger['paths'] assert '/exception' in swagger['paths'] # test", "= json.loads(r.data.decode()) assert swagger[\"info\"] == { 'version': '1.0', 'title': 'example' } assert '/multiply'", "in swagger['paths'] assert '/exception' in swagger['paths'] # test blueprint is documented as well", "def test_happy_path(test_app): r = test_app.get(\"/multiply?left=3&right=3\") assert json.loads(r.data.decode()) == 9 def test_headers(test_app): r =", "'version': '1.0', 'title': 'example' } assert '/multiply' in swagger['paths'] assert '/exception' in swagger['paths']", "'/blueprint/foo' in swagger['paths'] def test_swagger_html(test_app): r = test_app.get(\"/api/\") assert \"/swagger.json\" in r.data.decode() assert", "test_complex(test_app): r = test_app.post(\"/complex/3\", data=json.dumps({\"body\": \"1\"}), headers={ \"header\": \"2\", \"content-type\": \"application/json\" }) assert", "'example' } assert '/multiply' in swagger['paths'] assert '/exception' in swagger['paths'] # test blueprint", "in swagger['paths'] # test blueprint is documented as well assert '/blueprint/foo' in swagger['paths']", "resp[\"success\"] is False def test_swagger(test_app): r = test_app.get(\"/swagger.json\") swagger = json.loads(r.data.decode()) assert swagger[\"info\"]", "assert resp[\"success\"] is False def test_swagger(test_app): r = test_app.get(\"/swagger.json\") swagger = json.loads(r.data.decode()) assert", "test blueprint is documented as well assert '/blueprint/foo' in swagger['paths'] def test_swagger_html(test_app): r", "json.loads(r.data.decode()) assert resp[\"code\"] == 400 assert resp[\"success\"] is False def test_swagger(test_app): r =", "def test_swagger(test_app): r = test_app.get(\"/swagger.json\") swagger = json.loads(r.data.decode()) assert swagger[\"info\"] == { 'version':", "swagger['paths'] def test_swagger_html(test_app): r = test_app.get(\"/api/\") assert \"/swagger.json\" in r.data.decode() assert r.status_code ==", "test_app.get(\"/exception\") assert r.status_code == 400 resp = json.loads(r.data.decode()) assert resp[\"code\"] == 400 assert", "test_app.get(\"/api/v1/header\") assert r.headers[\"x-nothing\"] == \"value\" def test_complex(test_app): r = test_app.post(\"/complex/3\", data=json.dumps({\"body\": \"1\"}), headers={", "json.loads(r.data.decode()) == \"1:2:3\" def test_api_exception(test_app): r = test_app.get(\"/exception\") assert r.status_code == 400 resp", "= 
test_app.post(\"/complex/3\", data=json.dumps({\"body\": \"1\"}), headers={ \"header\": \"2\", \"content-type\": \"application/json\" }) assert json.loads(r.data.decode()) ==", "== \"1:2:3\" def test_api_exception(test_app): r = test_app.get(\"/exception\") assert r.status_code == 400 resp =", "json def test_happy_path(test_app): r = test_app.get(\"/multiply?left=3&right=3\") assert json.loads(r.data.decode()) == 9 def test_headers(test_app): r", "is documented as well assert '/blueprint/foo' in swagger['paths'] def test_swagger_html(test_app): r = test_app.get(\"/api/\")", "9 def test_headers(test_app): r = test_app.get(\"/api/v1/header\") assert r.headers[\"x-nothing\"] == \"value\" def test_complex(test_app): r", "resp[\"code\"] == 400 assert resp[\"success\"] is False def test_swagger(test_app): r = test_app.get(\"/swagger.json\") swagger", "swagger['paths'] # test blueprint is documented as well assert '/blueprint/foo' in swagger['paths'] def", "\"2\", \"content-type\": \"application/json\" }) assert json.loads(r.data.decode()) == \"1:2:3\" def test_api_exception(test_app): r = test_app.get(\"/exception\")", "blueprint is documented as well assert '/blueprint/foo' in swagger['paths'] def test_swagger_html(test_app): r =", "import json def test_happy_path(test_app): r = test_app.get(\"/multiply?left=3&right=3\") assert json.loads(r.data.decode()) == 9 def test_headers(test_app):", "r = test_app.get(\"/swagger.json\") swagger = json.loads(r.data.decode()) assert swagger[\"info\"] == { 'version': '1.0', 'title':", "def test_api_exception(test_app): r = test_app.get(\"/exception\") assert r.status_code == 400 resp = json.loads(r.data.decode()) assert", "test_happy_path(test_app): r = test_app.get(\"/multiply?left=3&right=3\") assert json.loads(r.data.decode()) == 9 def test_headers(test_app): r = test_app.get(\"/api/v1/header\")", "\"1:2:3\" def test_api_exception(test_app): r = test_app.get(\"/exception\") assert r.status_code == 400 resp = json.loads(r.data.decode())", "{ 'version': '1.0', 'title': 'example' } assert '/multiply' in swagger['paths'] assert '/exception' in", "documented as well assert '/blueprint/foo' in swagger['paths'] def test_swagger_html(test_app): r = test_app.get(\"/api/\") assert", "'/exception' in swagger['paths'] # test blueprint is documented as well assert '/blueprint/foo' in", "\"1\"}), headers={ \"header\": \"2\", \"content-type\": \"application/json\" }) assert json.loads(r.data.decode()) == \"1:2:3\" def test_api_exception(test_app):", "assert json.loads(r.data.decode()) == 9 def test_headers(test_app): r = test_app.get(\"/api/v1/header\") assert r.headers[\"x-nothing\"] == \"value\"", "test_app.post(\"/complex/3\", data=json.dumps({\"body\": \"1\"}), headers={ \"header\": \"2\", \"content-type\": \"application/json\" }) assert json.loads(r.data.decode()) == \"1:2:3\"", "test_swagger(test_app): r = test_app.get(\"/swagger.json\") swagger = json.loads(r.data.decode()) assert swagger[\"info\"] == { 'version': '1.0',", "test_app.get(\"/swagger.json\") swagger = json.loads(r.data.decode()) assert swagger[\"info\"] == { 'version': '1.0', 'title': 'example' }", "is False def test_swagger(test_app): r = test_app.get(\"/swagger.json\") swagger = json.loads(r.data.decode()) assert swagger[\"info\"] ==", "def test_swagger_html(test_app): r = test_app.get(\"/api/\") assert \"/swagger.json\" in r.data.decode() assert r.status_code == 200", "swagger[\"info\"] == { 'version': '1.0', 'title': 'example' } assert '/multiply' in swagger['paths'] assert", "= 
test_app.get(\"/swagger.json\") swagger = json.loads(r.data.decode()) assert swagger[\"info\"] == { 'version': '1.0', 'title': 'example'", "assert json.loads(r.data.decode()) == \"1:2:3\" def test_api_exception(test_app): r = test_app.get(\"/exception\") assert r.status_code == 400", "well assert '/blueprint/foo' in swagger['paths'] def test_swagger_html(test_app): r = test_app.get(\"/api/\") assert \"/swagger.json\" in", "r = test_app.get(\"/exception\") assert r.status_code == 400 resp = json.loads(r.data.decode()) assert resp[\"code\"] ==", "# test blueprint is documented as well assert '/blueprint/foo' in swagger['paths'] def test_swagger_html(test_app):", "== 9 def test_headers(test_app): r = test_app.get(\"/api/v1/header\") assert r.headers[\"x-nothing\"] == \"value\" def test_complex(test_app):", "\"value\" def test_complex(test_app): r = test_app.post(\"/complex/3\", data=json.dumps({\"body\": \"1\"}), headers={ \"header\": \"2\", \"content-type\": \"application/json\"", "r.status_code == 400 resp = json.loads(r.data.decode()) assert resp[\"code\"] == 400 assert resp[\"success\"] is", "'/multiply' in swagger['paths'] assert '/exception' in swagger['paths'] # test blueprint is documented as", "= test_app.get(\"/api/v1/header\") assert r.headers[\"x-nothing\"] == \"value\" def test_complex(test_app): r = test_app.post(\"/complex/3\", data=json.dumps({\"body\": \"1\"}),", "assert '/multiply' in swagger['paths'] assert '/exception' in swagger['paths'] # test blueprint is documented", "swagger['paths'] assert '/exception' in swagger['paths'] # test blueprint is documented as well assert", "== 400 resp = json.loads(r.data.decode()) assert resp[\"code\"] == 400 assert resp[\"success\"] is False", "= test_app.get(\"/exception\") assert r.status_code == 400 resp = json.loads(r.data.decode()) assert resp[\"code\"] == 400", "assert '/exception' in swagger['paths'] # test blueprint is documented as well assert '/blueprint/foo'", "test_headers(test_app): r = test_app.get(\"/api/v1/header\") assert r.headers[\"x-nothing\"] == \"value\" def test_complex(test_app): r = test_app.post(\"/complex/3\",", "r = test_app.get(\"/api/v1/header\") assert r.headers[\"x-nothing\"] == \"value\" def test_complex(test_app): r = test_app.post(\"/complex/3\", data=json.dumps({\"body\":", "assert resp[\"code\"] == 400 assert resp[\"success\"] is False def test_swagger(test_app): r = test_app.get(\"/swagger.json\")", "r.headers[\"x-nothing\"] == \"value\" def test_complex(test_app): r = test_app.post(\"/complex/3\", data=json.dumps({\"body\": \"1\"}), headers={ \"header\": \"2\",", "test_app.get(\"/multiply?left=3&right=3\") assert json.loads(r.data.decode()) == 9 def test_headers(test_app): r = test_app.get(\"/api/v1/header\") assert r.headers[\"x-nothing\"] ==", "<reponame>pwesthagen/transmute-core<gh_stars>10-100 import json def test_happy_path(test_app): r = test_app.get(\"/multiply?left=3&right=3\") assert json.loads(r.data.decode()) == 9 def", "def test_headers(test_app): r = test_app.get(\"/api/v1/header\") assert r.headers[\"x-nothing\"] == \"value\" def test_complex(test_app): r =", "= json.loads(r.data.decode()) assert resp[\"code\"] == 400 assert resp[\"success\"] is False def test_swagger(test_app): r", "resp = json.loads(r.data.decode()) assert resp[\"code\"] == 400 assert resp[\"success\"] is False def test_swagger(test_app):", "test_api_exception(test_app): r = test_app.get(\"/exception\") assert r.status_code == 400 resp = json.loads(r.data.decode()) assert resp[\"code\"]", "= 
test_app.get(\"/multiply?left=3&right=3\") assert json.loads(r.data.decode()) == 9 def test_headers(test_app): r = test_app.get(\"/api/v1/header\") assert r.headers[\"x-nothing\"]", "400 assert resp[\"success\"] is False def test_swagger(test_app): r = test_app.get(\"/swagger.json\") swagger = json.loads(r.data.decode())", "False def test_swagger(test_app): r = test_app.get(\"/swagger.json\") swagger = json.loads(r.data.decode()) assert swagger[\"info\"] == {", "json.loads(r.data.decode()) assert swagger[\"info\"] == { 'version': '1.0', 'title': 'example' } assert '/multiply' in", "== 400 assert resp[\"success\"] is False def test_swagger(test_app): r = test_app.get(\"/swagger.json\") swagger =", "headers={ \"header\": \"2\", \"content-type\": \"application/json\" }) assert json.loads(r.data.decode()) == \"1:2:3\" def test_api_exception(test_app): r", "swagger = json.loads(r.data.decode()) assert swagger[\"info\"] == { 'version': '1.0', 'title': 'example' } assert", "data=json.dumps({\"body\": \"1\"}), headers={ \"header\": \"2\", \"content-type\": \"application/json\" }) assert json.loads(r.data.decode()) == \"1:2:3\" def", "r = test_app.get(\"/multiply?left=3&right=3\") assert json.loads(r.data.decode()) == 9 def test_headers(test_app): r = test_app.get(\"/api/v1/header\") assert", "assert r.status_code == 400 resp = json.loads(r.data.decode()) assert resp[\"code\"] == 400 assert resp[\"success\"]", "assert swagger[\"info\"] == { 'version': '1.0', 'title': 'example' } assert '/multiply' in swagger['paths']", "assert r.headers[\"x-nothing\"] == \"value\" def test_complex(test_app): r = test_app.post(\"/complex/3\", data=json.dumps({\"body\": \"1\"}), headers={ \"header\":", "== \"value\" def test_complex(test_app): r = test_app.post(\"/complex/3\", data=json.dumps({\"body\": \"1\"}), headers={ \"header\": \"2\", \"content-type\":", "\"content-type\": \"application/json\" }) assert json.loads(r.data.decode()) == \"1:2:3\" def test_api_exception(test_app): r = test_app.get(\"/exception\") assert", "as well assert '/blueprint/foo' in swagger['paths'] def test_swagger_html(test_app): r = test_app.get(\"/api/\") assert \"/swagger.json\"", "assert '/blueprint/foo' in swagger['paths'] def test_swagger_html(test_app): r = test_app.get(\"/api/\") assert \"/swagger.json\" in r.data.decode()", "}) assert json.loads(r.data.decode()) == \"1:2:3\" def test_api_exception(test_app): r = test_app.get(\"/exception\") assert r.status_code ==", "\"header\": \"2\", \"content-type\": \"application/json\" }) assert json.loads(r.data.decode()) == \"1:2:3\" def test_api_exception(test_app): r =", "\"application/json\" }) assert json.loads(r.data.decode()) == \"1:2:3\" def test_api_exception(test_app): r = test_app.get(\"/exception\") assert r.status_code" ]
[ "import ControlBase class ControlProgress(ControlBase): _min = 0 _max = 100 def __init__(self, label", "return \"controls.push(new ControlProgress('\"+self._name+\"'));\" @property def value(self): return self._value @value.setter def value(self, value): self._form.horizontalSlider.setValue(", "@property def value(self): return self._value @value.setter def value(self, value): self._form.horizontalSlider.setValue( value ) @property", "value): self._form.horizontalSlider.setValue( value ) @property def min(self): return self._form.horizontalSlider.minimum() @min.setter def min(self, value):", "value(self, value): self._form.horizontalSlider.setValue( value ) @property def min(self): return self._form.horizontalSlider.minimum() @min.setter def min(self,", "__init__(self, label = \"%p%\", defaultValue = 0, min = 0, max = 100,", "def min(self, value): self._form.horizontalSlider.setMinimum(value) @property def max(self): return self._form.horizontalSlider.maximum() @max.setter def max(self, value):", "0, max = 100, helptext=None): self._updateSlider = True self._min = min self._max =", "></div>\"\"\" % ( self._name ) return \"controls.push(new ControlProgress('\"+self._name+\"'));\" @property def value(self): return self._value", "= \"%p%\", defaultValue = 0, min = 0, max = 100, helptext=None): self._updateSlider", "ControlBase class ControlProgress(ControlBase): _min = 0 _max = 100 def __init__(self, label =", "pyforms.terminal.Controls.ControlBase import ControlBase class ControlProgress(ControlBase): _min = 0 _max = 100 def __init__(self,", "= True self._min = min self._max = max ControlBase.__init__(self, label, defaultValue) def initControl(self):", "\"\"\"<div id='id%s' class='progressbar' ></div>\"\"\" % ( self._name ) return \"controls.push(new ControlProgress('\"+self._name+\"'));\" @property def", "self._value @value.setter def value(self, value): self._form.horizontalSlider.setValue( value ) @property def min(self): return self._form.horizontalSlider.minimum()", "@property def min(self): return self._form.horizontalSlider.minimum() @min.setter def min(self, value): self._form.horizontalSlider.setMinimum(value) @property def max(self):", "0, min = 0, max = 100, helptext=None): self._updateSlider = True self._min =", "= max ControlBase.__init__(self, label, defaultValue) def initControl(self): #return \"\"\"<div id='id%s' class='progressbar' ></div>\"\"\" %", "def value(self): return self._value @value.setter def value(self, value): self._form.horizontalSlider.setValue( value ) @property def", "return self._value @value.setter def value(self, value): self._form.horizontalSlider.setValue( value ) @property def min(self): return", "True self._min = min self._max = max ControlBase.__init__(self, label, defaultValue) def initControl(self): #return", "self._name ) return \"controls.push(new ControlProgress('\"+self._name+\"'));\" @property def value(self): return self._value @value.setter def value(self,", "def min(self): return self._form.horizontalSlider.minimum() @min.setter def min(self, value): self._form.horizontalSlider.setMinimum(value) @property def max(self): return", "= 100 def __init__(self, label = \"%p%\", defaultValue = 0, min = 0,", "self._min = min self._max = max ControlBase.__init__(self, label, defaultValue) def initControl(self): #return \"\"\"<div", "from pyforms.terminal.Controls.ControlBase import ControlBase class ControlProgress(ControlBase): _min = 0 _max = 100 def", "class='progressbar' ></div>\"\"\" % ( self._name ) return \"controls.push(new 
ControlProgress('\"+self._name+\"'));\" @property def value(self): return", "value(self): return self._value @value.setter def value(self, value): self._form.horizontalSlider.setValue( value ) @property def min(self):", "defaultValue) def initControl(self): #return \"\"\"<div id='id%s' class='progressbar' ></div>\"\"\" % ( self._name ) return", "#return \"\"\"<div id='id%s' class='progressbar' ></div>\"\"\" % ( self._name ) return \"controls.push(new ControlProgress('\"+self._name+\"'));\" @property", ") return \"controls.push(new ControlProgress('\"+self._name+\"'));\" @property def value(self): return self._value @value.setter def value(self, value):", "= 100, helptext=None): self._updateSlider = True self._min = min self._max = max ControlBase.__init__(self,", "ControlProgress('\"+self._name+\"'));\" @property def value(self): return self._value @value.setter def value(self, value): self._form.horizontalSlider.setValue( value )", "self._form.horizontalSlider.setValue( value ) @property def min(self): return self._form.horizontalSlider.minimum() @min.setter def min(self, value): self._form.horizontalSlider.setMinimum(value)", "@min.setter def min(self, value): self._form.horizontalSlider.setMinimum(value) @property def max(self): return self._form.horizontalSlider.maximum() @max.setter def max(self,", "= min self._max = max ControlBase.__init__(self, label, defaultValue) def initControl(self): #return \"\"\"<div id='id%s'", "value ) @property def min(self): return self._form.horizontalSlider.minimum() @min.setter def min(self, value): self._form.horizontalSlider.setMinimum(value) @property", "= 0, min = 0, max = 100, helptext=None): self._updateSlider = True self._min", "_min = 0 _max = 100 def __init__(self, label = \"%p%\", defaultValue =", "min self._max = max ControlBase.__init__(self, label, defaultValue) def initControl(self): #return \"\"\"<div id='id%s' class='progressbar'", "self._max = max ControlBase.__init__(self, label, defaultValue) def initControl(self): #return \"\"\"<div id='id%s' class='progressbar' ></div>\"\"\"", "0 _max = 100 def __init__(self, label = \"%p%\", defaultValue = 0, min", "label, defaultValue) def initControl(self): #return \"\"\"<div id='id%s' class='progressbar' ></div>\"\"\" % ( self._name )", "self._form.horizontalSlider.minimum() @min.setter def min(self, value): self._form.horizontalSlider.setMinimum(value) @property def max(self): return self._form.horizontalSlider.maximum() @max.setter def", "def value(self, value): self._form.horizontalSlider.setValue( value ) @property def min(self): return self._form.horizontalSlider.minimum() @min.setter def", "def __init__(self, label = \"%p%\", defaultValue = 0, min = 0, max =", "min = 0, max = 100, helptext=None): self._updateSlider = True self._min = min", "initControl(self): #return \"\"\"<div id='id%s' class='progressbar' ></div>\"\"\" % ( self._name ) return \"controls.push(new ControlProgress('\"+self._name+\"'));\"", "class ControlProgress(ControlBase): _min = 0 _max = 100 def __init__(self, label = \"%p%\",", "self._updateSlider = True self._min = min self._max = max ControlBase.__init__(self, label, defaultValue) def", "id='id%s' class='progressbar' ></div>\"\"\" % ( self._name ) return \"controls.push(new ControlProgress('\"+self._name+\"'));\" @property def value(self):", "ControlBase.__init__(self, label, defaultValue) def initControl(self): #return \"\"\"<div id='id%s' class='progressbar' ></div>\"\"\" % ( self._name", ") @property def min(self): return self._form.horizontalSlider.minimum() 
@min.setter def min(self, value): self._form.horizontalSlider.setMinimum(value) @property def", "def initControl(self): #return \"\"\"<div id='id%s' class='progressbar' ></div>\"\"\" % ( self._name ) return \"controls.push(new", "= 0 _max = 100 def __init__(self, label = \"%p%\", defaultValue = 0,", "max = 100, helptext=None): self._updateSlider = True self._min = min self._max = max", "100, helptext=None): self._updateSlider = True self._min = min self._max = max ControlBase.__init__(self, label,", "defaultValue = 0, min = 0, max = 100, helptext=None): self._updateSlider = True", "label = \"%p%\", defaultValue = 0, min = 0, max = 100, helptext=None):", "@value.setter def value(self, value): self._form.horizontalSlider.setValue( value ) @property def min(self): return self._form.horizontalSlider.minimum() @min.setter", "min(self): return self._form.horizontalSlider.minimum() @min.setter def min(self, value): self._form.horizontalSlider.setMinimum(value) @property def max(self): return self._form.horizontalSlider.maximum()", "min(self, value): self._form.horizontalSlider.setMinimum(value) @property def max(self): return self._form.horizontalSlider.maximum() @max.setter def max(self, value): self._form.horizontalSlider.setMaximum(value)", "ControlProgress(ControlBase): _min = 0 _max = 100 def __init__(self, label = \"%p%\", defaultValue", "helptext=None): self._updateSlider = True self._min = min self._max = max ControlBase.__init__(self, label, defaultValue)", "( self._name ) return \"controls.push(new ControlProgress('\"+self._name+\"'));\" @property def value(self): return self._value @value.setter def", "return self._form.horizontalSlider.minimum() @min.setter def min(self, value): self._form.horizontalSlider.setMinimum(value) @property def max(self): return self._form.horizontalSlider.maximum() @max.setter", "\"%p%\", defaultValue = 0, min = 0, max = 100, helptext=None): self._updateSlider =", "max ControlBase.__init__(self, label, defaultValue) def initControl(self): #return \"\"\"<div id='id%s' class='progressbar' ></div>\"\"\" % (", "_max = 100 def __init__(self, label = \"%p%\", defaultValue = 0, min =", "100 def __init__(self, label = \"%p%\", defaultValue = 0, min = 0, max", "% ( self._name ) return \"controls.push(new ControlProgress('\"+self._name+\"'));\" @property def value(self): return self._value @value.setter", "\"controls.push(new ControlProgress('\"+self._name+\"'));\" @property def value(self): return self._value @value.setter def value(self, value): self._form.horizontalSlider.setValue( value", "= 0, max = 100, helptext=None): self._updateSlider = True self._min = min self._max" ]
[ "self.goal_bc = self.BC_MAX # 方向 0は切 self.way = 0 def getSmoothLevel(self): # y", "math from Brake import BrakeStatues # 日本国有鉄道DE10液体式ディーゼル機関車の動作を再現するライブラリ class DE10: def __init__(self): # 車速(m/s)", "= log2(x+1) 最大が1 return (math.log2(self.mascon_level+1))/4.0 # 0.1秒進める def advanceTime(self): # 加速度を求める(m/s2) if self.speed", "= 0 # 切位置時は空吹かしになって加速はしない if self.getWay() == 0: print('空吹かし') accel = 0 #", "BrakeStatues.MAX_BRAKE): self.goal_bc = self.BC_MAX elif self.brake_status == BrakeStatues.BRAKE: self.goal_bc = round(self.BC_MAX * self.brake_level,", "== BrakeStatues.LOWER_BRAKE: self.goal_bc = 0.0 # 非常ブレーキ if self.eb: self.goal_bc = self.BC_MAX_EB self.setMascon(0)", "elif self.bc < self.goal_bc: self.bc += (self.goal_bc - self.bc) / 5.0 # 丸める", "self.brake_status == BrakeStatues.LOWER_BRAKE: self.goal_bc = 0.0 # 非常ブレーキ if self.eb: self.goal_bc = self.BC_MAX_EB", "self.goal_bc = 0.0 # 非常ブレーキ if self.eb: self.goal_bc = self.BC_MAX_EB self.setMascon(0) # 停車で復位", "brake_status def getBp(self): # 490から始まって、BCが増えるごとに減る return (self.bc / self.BC_MAX_EB) * -10 + 490", "def setBrakeStatus(self, brake_status): self.brake_status = brake_status def getBp(self): # 490から始まって、BCが増えるごとに減る return (self.bc /", "< 3.33: # 12kph accel = self.getSmoothLevel() * 0.803 elif self.speed < 6.94:", "目標ブレーキシリンダ圧力 self.goal_bc = self.BC_MAX # 方向 0は切 self.way = 0 def getSmoothLevel(self): #", "BrakeStatues.RUN: self.goal_bc = 0.0 elif self.brake_status == BrakeStatues.LOWER_BRAKE: self.goal_bc = 0.0 # 非常ブレーキ", "brake_level): self.brake_level = brake_level # ブレーキ装置の状態(非常、ブレーキ、ユルメ…)を入力 def setBrakeStatus(self, brake_status): self.brake_status = brake_status def", "BrakeStatues.ERROR, BrakeStatues.EMER): self.eb = True elif self.brake_status in (BrakeStatues.FIX, BrakeStatues.MAX_BRAKE): self.goal_bc = self.BC_MAX", "self.brake_status in (BrakeStatues.FIX, BrakeStatues.MAX_BRAKE): self.goal_bc = self.BC_MAX elif self.brake_status == BrakeStatues.BRAKE: self.goal_bc =", "最大が1 return (math.log2(self.mascon_level+1))/4.0 # 0.1秒進める def advanceTime(self): # 加速度を求める(m/s2) if self.speed < 3.33:", "* 1.3 - self.bc / 1.5) * 0.1 * self.freight if self.speed <", "# 常用最大ブレーキシリンダ圧力 本物は5.7kg/cm2 self.BC_MAX = 3.0 # ブレーキシリンダ圧力(減速度) self.bc = self.BC_MAX # ブレーキ装置状態", "= self.BC_MAX_EB # 加減速計算 self.speed = self.speed + (accel * 1.3 - self.bc", "= round(self.BC_MAX * self.brake_level, 2) elif self.brake_status == BrakeStatues.RUN: self.goal_bc = 0.0 elif", "(math.log2(self.mascon_level+1))/4.0 # 0.1秒進める def advanceTime(self): # 加速度を求める(m/s2) if self.speed < 3.33: # 12kph", "目標ブレーキシリンダ圧力を設定すると、ゆっくりとそこに向けて動いていく # bc: 減速度(m/s2)とする。ここも実物に則さない if abs(self.bc - self.goal_bc) < 0.1: self.goal_bc = self.bc", "brake_status): self.brake_status = brake_status def getBp(self): # 490から始まって、BCが増えるごとに減る return (self.bc / self.BC_MAX_EB) *", "y = log2(x+1) 最大が1 return (math.log2(self.mascon_level+1))/4.0 # 0.1秒進める def advanceTime(self): # 加速度を求める(m/s2) if", "* 0.5 elif self.speed < 12.5: # 45kph accel = self.getSmoothLevel() * 0.333", "> self.goal_bc: self.bc -= (self.bc - self.goal_bc) / 5.0 elif self.bc < self.goal_bc:", "else: self.mascon_level = 0 # 0(運転) ~ 1(全ブレーキ) のブレーキレベルを入力 def setBrake(self, brake_level): self.brake_level", "EB時は力行不可 def setMascon(self, mascon_level): if not self.eb: self.mascon_level = mascon_level else: self.mascon_level =", "12.5: # 45kph accel = self.getSmoothLevel() * 0.333 elif self.speed < 23.5: #", "self.getSmoothLevel() * 0.333 elif self.speed < 23.5: # 84.6kph accel = self.getSmoothLevel() *", "if abs(self.bc - 
self.goal_bc) < 0.1: self.goal_bc = self.bc elif self.bc > self.goal_bc:", "self.bc < 0.1: self.bc = 0.1 elif self.bc > self.BC_MAX_EB: self.bc = self.BC_MAX_EB", "self.eb = True elif self.brake_status in (BrakeStatues.FIX, BrakeStatues.MAX_BRAKE): self.goal_bc = self.BC_MAX elif self.brake_status", "self.goal_bc = 0.0 elif self.brake_status == BrakeStatues.LOWER_BRAKE: self.goal_bc = 0.0 # 非常ブレーキ if", "advanceTime(self): # 加速度を求める(m/s2) if self.speed < 3.33: # 12kph accel = self.getSmoothLevel() *", "0.0 # 非常ブレーキ if self.eb: self.goal_bc = self.BC_MAX_EB self.setMascon(0) # 停車で復位 if self.speed", "self.freight = 1 # 目標ブレーキシリンダ圧力 self.goal_bc = self.BC_MAX # 方向 0は切 self.way =", "3.0 # ブレーキシリンダ圧力(減速度) self.bc = self.BC_MAX # ブレーキ装置状態 self.brake_status = BrakeStatues.FIX # 0(運転)", "本物は5.7kg/cm2 self.BC_MAX = 3.0 # ブレーキシリンダ圧力(減速度) self.bc = self.BC_MAX # ブレーキ装置状態 self.brake_status =", "if self.brake_status in (BrakeStatues.ERROR_SENSOR, BrakeStatues.ERROR, BrakeStatues.EMER): self.eb = True elif self.brake_status in (BrakeStatues.FIX,", "= self.BC_MAX # 方向 0は切 self.way = 0 def getSmoothLevel(self): # y =", "客貨車牽引時の加速度減少(単機: 1) self.freight = 1 # 目標ブレーキシリンダ圧力 self.goal_bc = self.BC_MAX # 方向 0は切", "= 1 # 目標ブレーキシリンダ圧力 self.goal_bc = self.BC_MAX # 方向 0は切 self.way = 0", "# ブレーキ装置状態 self.brake_status = BrakeStatues.FIX # 0(運転) - 1(全ブレーキ) のブレーキレベル self.brake_level = 0", "getSpeed(self): return self.speed def setWay(self, way): self.way = way def getWay(self): return self.way", "1(全ブレーキ) のブレーキレベルを入力 def setBrake(self, brake_level): self.brake_level = brake_level # ブレーキ装置の状態(非常、ブレーキ、ユルメ…)を入力 def setBrakeStatus(self, brake_status):", "return (math.log2(self.mascon_level+1))/4.0 # 0.1秒進める def advanceTime(self): # 加速度を求める(m/s2) if self.speed < 3.33: #", "1) self.freight = 1 # 目標ブレーキシリンダ圧力 self.goal_bc = self.BC_MAX # 方向 0は切 self.way", "self.bc > self.BC_MAX_EB: self.bc = self.BC_MAX_EB # 加減速計算 self.speed = self.speed + (accel", "* 0.803 elif self.speed < 6.94: # 25kph accel = self.getSmoothLevel() * 0.5", "1(全ブレーキ) のブレーキレベル self.brake_level = 0 # 非常ブレーキ状態 self.eb = False # 客貨車牽引時の加速度減少(単機: 1)", "BrakeStatues.EMER): self.eb = True elif self.brake_status in (BrakeStatues.FIX, BrakeStatues.MAX_BRAKE): self.goal_bc = self.BC_MAX elif", "== BrakeStatues.RUN: self.goal_bc = 0.0 elif self.brake_status == BrakeStatues.LOWER_BRAKE: self.goal_bc = 0.0 #", "< 23.5: # 84.6kph accel = self.getSmoothLevel() * 0.194 # 最高速度では加速は0になる else: accel", "- self.goal_bc) < 0.1: self.goal_bc = self.bc elif self.bc > self.goal_bc: self.bc -=", "12kph accel = self.getSmoothLevel() * 0.803 elif self.speed < 6.94: # 25kph accel", "def setBrake(self, brake_level): self.brake_level = brake_level # ブレーキ装置の状態(非常、ブレーキ、ユルメ…)を入力 def setBrakeStatus(self, brake_status): self.brake_status =", "self.speed = 0 # マスコンノッチ(0-14) self.mascon_level = 0 # 非常ブレーキシリンダ圧力 self.BC_MAX_EB = 3.5", "# 45kph accel = self.getSmoothLevel() * 0.333 elif self.speed < 23.5: # 84.6kph", "(self.bc / self.BC_MAX_EB) * -10 + 490 + 10 # 実際のブレーキ管圧力を便宜上のブレーキシリンダ圧力値から求める # ブレーキ管圧力は通常490kPa", "if not self.eb: self.mascon_level = mascon_level else: self.mascon_level = 0 # 0(運転) ~", "= False # 0.1秒あたりのブレーキ作用・寛解 ここは実物に則さない # 目標ブレーキシリンダ圧力を設定すると、ゆっくりとそこに向けて動いていく # bc: 減速度(m/s2)とする。ここも実物に則さない if abs(self.bc -", "elif self.bc > self.goal_bc: self.bc -= (self.bc - self.goal_bc) / 5.0 elif self.bc", "self.eb = False # 0.1秒あたりのブレーキ作用・寛解 ここは実物に則さない # 目標ブレーキシリンダ圧力を設定すると、ゆっくりとそこに向けて動いていく # bc: 減速度(m/s2)とする。ここも実物に則さない if abs(self.bc", "490から始まって、BCが増えるごとに減る return (self.bc / 
self.BC_MAX_EB) * -10 + 490 + 10 # 実際のブレーキ管圧力を便宜上のブレーキシリンダ圧力値から求める", "self.way = way def getWay(self): return self.way # 0 ~ 14のマスコンノッチを入力 EB時は力行不可 def", "1 # 目標ブレーキシリンダ圧力 self.goal_bc = self.BC_MAX # 方向 0は切 self.way = 0 def", "= 0.0 # 非常ブレーキ if self.eb: self.goal_bc = self.BC_MAX_EB self.setMascon(0) # 停車で復位 if", "self.bc = self.BC_MAX # ブレーキ装置状態 self.brake_status = BrakeStatues.FIX # 0(運転) - 1(全ブレーキ) のブレーキレベル", "2) # 走行抵抗 if self.bc < 0.1: self.bc = 0.1 elif self.bc >", "0.1: self.goal_bc = self.bc elif self.bc > self.goal_bc: self.bc -= (self.bc - self.goal_bc)", "from Brake import BrakeStatues # 日本国有鉄道DE10液体式ディーゼル機関車の動作を再現するライブラリ class DE10: def __init__(self): # 車速(m/s) self.speed", "= self.BC_MAX_EB self.setMascon(0) # 停車で復位 if self.speed == 0: self.eb = False #", "self.way = 0 def getSmoothLevel(self): # y = log2(x+1) 最大が1 return (math.log2(self.mascon_level+1))/4.0 #", "0 def getSpeed(self): return self.speed def setWay(self, way): self.way = way def getWay(self):", "# 0.1秒進める def advanceTime(self): # 加速度を求める(m/s2) if self.speed < 3.33: # 12kph accel", "self.freight if self.speed < 0: self.speed = 0 def getSpeed(self): return self.speed def", "if self.speed < 0: self.speed = 0 def getSpeed(self): return self.speed def setWay(self,", "0 # 0(運転) ~ 1(全ブレーキ) のブレーキレベルを入力 def setBrake(self, brake_level): self.brake_level = brake_level #", "0.194 # 最高速度では加速は0になる else: accel = 0 # 切位置時は空吹かしになって加速はしない if self.getWay() == 0:", "+= (self.goal_bc - self.bc) / 5.0 # 丸める self.bc = round(self.bc, 2) #", "= BrakeStatues.FIX # 0(運転) - 1(全ブレーキ) のブレーキレベル self.brake_level = 0 # 非常ブレーキ状態 self.eb", "self.bc < self.goal_bc: self.bc += (self.goal_bc - self.bc) / 5.0 # 丸める self.bc", "import BrakeStatues # 日本国有鉄道DE10液体式ディーゼル機関車の動作を再現するライブラリ class DE10: def __init__(self): # 車速(m/s) self.speed = 0", "> self.BC_MAX_EB: self.bc = self.BC_MAX_EB # 加減速計算 self.speed = self.speed + (accel *", "class DE10: def __init__(self): # 車速(m/s) self.speed = 0 # マスコンノッチ(0-14) self.mascon_level =", "0 # 切位置時は空吹かしになって加速はしない if self.getWay() == 0: print('空吹かし') accel = 0 # ブレーキ装置状態から目標ブレーキシリンダ圧を求める", "accel = self.getSmoothLevel() * 0.194 # 最高速度では加速は0になる else: accel = 0 # 切位置時は空吹かしになって加速はしない", "elif self.brake_status in (BrakeStatues.FIX, BrakeStatues.MAX_BRAKE): self.goal_bc = self.BC_MAX elif self.brake_status == BrakeStatues.BRAKE: self.goal_bc", "# 走行抵抗 if self.bc < 0.1: self.bc = 0.1 elif self.bc > self.BC_MAX_EB:", "<reponame>mipsparc/DE15 #coding:utf-8 import math from Brake import BrakeStatues # 日本国有鉄道DE10液体式ディーゼル機関車の動作を再現するライブラリ class DE10: def", "self.getWay() == 0: print('空吹かし') accel = 0 # ブレーキ装置状態から目標ブレーキシリンダ圧を求める if self.brake_status in (BrakeStatues.ERROR_SENSOR,", "# 実際のブレーキ管圧力を便宜上のブレーキシリンダ圧力値から求める # ブレーキ管圧力は通常490kPa 140kPa減圧して350kPaになると最大がかかる def getBc(self): return (self.bc / self.BC_MAX_EB) * 350", "0.333 elif self.speed < 23.5: # 84.6kph accel = self.getSmoothLevel() * 0.194 #", "else: accel = 0 # 切位置時は空吹かしになって加速はしない if self.getWay() == 0: print('空吹かし') accel =", "self.goal_bc) / 5.0 elif self.bc < self.goal_bc: self.bc += (self.goal_bc - self.bc) /", "BrakeStatues.LOWER_BRAKE: self.goal_bc = 0.0 # 非常ブレーキ if self.eb: self.goal_bc = self.BC_MAX_EB self.setMascon(0) #", "accel = 0 # ブレーキ装置状態から目標ブレーキシリンダ圧を求める if self.brake_status in (BrakeStatues.ERROR_SENSOR, BrakeStatues.ERROR, BrakeStatues.EMER): self.eb =", "self.mascon_level = 0 # 0(運転) ~ 1(全ブレーキ) のブレーキレベルを入力 def setBrake(self, brake_level): self.brake_level =", "getWay(self): return self.way # 0 ~ 14のマスコンノッチを入力 EB時は力行不可 def setMascon(self, mascon_level): 
if not", "- self.goal_bc) / 5.0 elif self.bc < self.goal_bc: self.bc += (self.goal_bc - self.bc)", "加減速計算 self.speed = self.speed + (accel * 1.3 - self.bc / 1.5) *", "= brake_status def getBp(self): # 490から始まって、BCが増えるごとに減る return (self.bc / self.BC_MAX_EB) * -10 +", "self.speed < 0: self.speed = 0 def getSpeed(self): return self.speed def setWay(self, way):", "self.speed < 6.94: # 25kph accel = self.getSmoothLevel() * 0.5 elif self.speed <", "self.bc += (self.goal_bc - self.bc) / 5.0 # 丸める self.bc = round(self.bc, 2)", "0: self.speed = 0 def getSpeed(self): return self.speed def setWay(self, way): self.way =", "1.5) * 0.1 * self.freight if self.speed < 0: self.speed = 0 def", "# 方向 0は切 self.way = 0 def getSmoothLevel(self): # y = log2(x+1) 最大が1", "self.speed = self.speed + (accel * 1.3 - self.bc / 1.5) * 0.1", "self.bc = self.BC_MAX_EB # 加減速計算 self.speed = self.speed + (accel * 1.3 -", "< 0: self.speed = 0 def getSpeed(self): return self.speed def setWay(self, way): self.way", "= self.BC_MAX elif self.brake_status == BrakeStatues.BRAKE: self.goal_bc = round(self.BC_MAX * self.brake_level, 2) elif", "< 6.94: # 25kph accel = self.getSmoothLevel() * 0.5 elif self.speed < 12.5:", "self.speed def setWay(self, way): self.way = way def getWay(self): return self.way # 0", "self.mascon_level = 0 # 非常ブレーキシリンダ圧力 self.BC_MAX_EB = 3.5 # 常用最大ブレーキシリンダ圧力 本物は5.7kg/cm2 self.BC_MAX =", "setMascon(self, mascon_level): if not self.eb: self.mascon_level = mascon_level else: self.mascon_level = 0 #", "= 0 def getSmoothLevel(self): # y = log2(x+1) 最大が1 return (math.log2(self.mascon_level+1))/4.0 # 0.1秒進める", "def setWay(self, way): self.way = way def getWay(self): return self.way # 0 ~", "elif self.speed < 12.5: # 45kph accel = self.getSmoothLevel() * 0.333 elif self.speed", "def getSpeed(self): return self.speed def setWay(self, way): self.way = way def getWay(self): return", "* 0.333 elif self.speed < 23.5: # 84.6kph accel = self.getSmoothLevel() * 0.194", "0.1 elif self.bc > self.BC_MAX_EB: self.bc = self.BC_MAX_EB # 加減速計算 self.speed = self.speed", "-10 + 490 + 10 # 実際のブレーキ管圧力を便宜上のブレーキシリンダ圧力値から求める # ブレーキ管圧力は通常490kPa 140kPa減圧して350kPaになると最大がかかる def getBc(self): return", "非常ブレーキ if self.eb: self.goal_bc = self.BC_MAX_EB self.setMascon(0) # 停車で復位 if self.speed == 0:", "= way def getWay(self): return self.way # 0 ~ 14のマスコンノッチを入力 EB時は力行不可 def setMascon(self,", "# 丸める self.bc = round(self.bc, 2) # 走行抵抗 if self.bc < 0.1: self.bc", "0.1: self.bc = 0.1 elif self.bc > self.BC_MAX_EB: self.bc = self.BC_MAX_EB # 加減速計算", "if self.speed < 3.33: # 12kph accel = self.getSmoothLevel() * 0.803 elif self.speed", "84.6kph accel = self.getSmoothLevel() * 0.194 # 最高速度では加速は0になる else: accel = 0 #", "# 0(運転) ~ 1(全ブレーキ) のブレーキレベルを入力 def setBrake(self, brake_level): self.brake_level = brake_level # ブレーキ装置の状態(非常、ブレーキ、ユルメ…)を入力", "DE10: def __init__(self): # 車速(m/s) self.speed = 0 # マスコンノッチ(0-14) self.mascon_level = 0", "# 目標ブレーキシリンダ圧力 self.goal_bc = self.BC_MAX # 方向 0は切 self.way = 0 def getSmoothLevel(self):", "日本国有鉄道DE10液体式ディーゼル機関車の動作を再現するライブラリ class DE10: def __init__(self): # 車速(m/s) self.speed = 0 # マスコンノッチ(0-14) self.mascon_level", "self.goal_bc) < 0.1: self.goal_bc = self.bc elif self.bc > self.goal_bc: self.bc -= (self.bc", "= 0 # 非常ブレーキシリンダ圧力 self.BC_MAX_EB = 3.5 # 常用最大ブレーキシリンダ圧力 本物は5.7kg/cm2 self.BC_MAX = 3.0", "1.3 - self.bc / 1.5) * 0.1 * self.freight if self.speed < 0:", "ブレーキ装置の状態(非常、ブレーキ、ユルメ…)を入力 def setBrakeStatus(self, brake_status): self.brake_status = brake_status def getBp(self): # 490から始まって、BCが増えるごとに減る return (self.bc", 
"10 # 実際のブレーキ管圧力を便宜上のブレーキシリンダ圧力値から求める # ブレーキ管圧力は通常490kPa 140kPa減圧して350kPaになると最大がかかる def getBc(self): return (self.bc / self.BC_MAX_EB) *", "~ 14のマスコンノッチを入力 EB時は力行不可 def setMascon(self, mascon_level): if not self.eb: self.mascon_level = mascon_level else:", "= 0.0 elif self.brake_status == BrakeStatues.LOWER_BRAKE: self.goal_bc = 0.0 # 非常ブレーキ if self.eb:", "def getBp(self): # 490から始まって、BCが増えるごとに減る return (self.bc / self.BC_MAX_EB) * -10 + 490 +", "self.brake_level, 2) elif self.brake_status == BrakeStatues.RUN: self.goal_bc = 0.0 elif self.brake_status == BrakeStatues.LOWER_BRAKE:", "= 3.5 # 常用最大ブレーキシリンダ圧力 本物は5.7kg/cm2 self.BC_MAX = 3.0 # ブレーキシリンダ圧力(減速度) self.bc = self.BC_MAX", "= self.getSmoothLevel() * 0.5 elif self.speed < 12.5: # 45kph accel = self.getSmoothLevel()", "0は切 self.way = 0 def getSmoothLevel(self): # y = log2(x+1) 最大が1 return (math.log2(self.mascon_level+1))/4.0", "self.brake_status == BrakeStatues.BRAKE: self.goal_bc = round(self.BC_MAX * self.brake_level, 2) elif self.brake_status == BrakeStatues.RUN:", "self.eb: self.goal_bc = self.BC_MAX_EB self.setMascon(0) # 停車で復位 if self.speed == 0: self.eb =", "/ 5.0 elif self.bc < self.goal_bc: self.bc += (self.goal_bc - self.bc) / 5.0", "3.5 # 常用最大ブレーキシリンダ圧力 本物は5.7kg/cm2 self.BC_MAX = 3.0 # ブレーキシリンダ圧力(減速度) self.bc = self.BC_MAX #", "self.BC_MAX_EB: self.bc = self.BC_MAX_EB # 加減速計算 self.speed = self.speed + (accel * 1.3", "0.1秒あたりのブレーキ作用・寛解 ここは実物に則さない # 目標ブレーキシリンダ圧力を設定すると、ゆっくりとそこに向けて動いていく # bc: 減速度(m/s2)とする。ここも実物に則さない if abs(self.bc - self.goal_bc) < 0.1:", "< self.goal_bc: self.bc += (self.goal_bc - self.bc) / 5.0 # 丸める self.bc =", "+ 490 + 10 # 実際のブレーキ管圧力を便宜上のブレーキシリンダ圧力値から求める # ブレーキ管圧力は通常490kPa 140kPa減圧して350kPaになると最大がかかる def getBc(self): return (self.bc", "0 def getSmoothLevel(self): # y = log2(x+1) 最大が1 return (math.log2(self.mascon_level+1))/4.0 # 0.1秒進める def", "self.eb: self.mascon_level = mascon_level else: self.mascon_level = 0 # 0(運転) ~ 1(全ブレーキ) のブレーキレベルを入力", "def __init__(self): # 車速(m/s) self.speed = 0 # マスコンノッチ(0-14) self.mascon_level = 0 #", "(self.bc - self.goal_bc) / 5.0 elif self.bc < self.goal_bc: self.bc += (self.goal_bc -", "return self.speed def setWay(self, way): self.way = way def getWay(self): return self.way #", "self.BC_MAX = 3.0 # ブレーキシリンダ圧力(減速度) self.bc = self.BC_MAX # ブレーキ装置状態 self.brake_status = BrakeStatues.FIX", "__init__(self): # 車速(m/s) self.speed = 0 # マスコンノッチ(0-14) self.mascon_level = 0 # 非常ブレーキシリンダ圧力", "= self.getSmoothLevel() * 0.194 # 最高速度では加速は0になる else: accel = 0 # 切位置時は空吹かしになって加速はしない if", "= self.speed + (accel * 1.3 - self.bc / 1.5) * 0.1 *", "# ブレーキシリンダ圧力(減速度) self.bc = self.BC_MAX # ブレーキ装置状態 self.brake_status = BrakeStatues.FIX # 0(運転) -", "# 停車で復位 if self.speed == 0: self.eb = False # 0.1秒あたりのブレーキ作用・寛解 ここは実物に則さない #", "常用最大ブレーキシリンダ圧力 本物は5.7kg/cm2 self.BC_MAX = 3.0 # ブレーキシリンダ圧力(減速度) self.bc = self.BC_MAX # ブレーキ装置状態 self.brake_status", "# 非常ブレーキ if self.eb: self.goal_bc = self.BC_MAX_EB self.setMascon(0) # 停車で復位 if self.speed ==", "25kph accel = self.getSmoothLevel() * 0.5 elif self.speed < 12.5: # 45kph accel", "= 0 # ブレーキ装置状態から目標ブレーキシリンダ圧を求める if self.brake_status in (BrakeStatues.ERROR_SENSOR, BrakeStatues.ERROR, BrakeStatues.EMER): self.eb = True", "self.bc > self.goal_bc: self.bc -= (self.bc - self.goal_bc) / 5.0 elif self.bc <", "self.goal_bc = self.bc elif self.bc > self.goal_bc: self.bc -= (self.bc - self.goal_bc) /", "+ 10 # 実際のブレーキ管圧力を便宜上のブレーキシリンダ圧力値から求める # ブレーキ管圧力は通常490kPa 140kPa減圧して350kPaになると最大がかかる def getBc(self): return (self.bc / self.BC_MAX_EB)", "6.94: # 25kph accel = 
self.getSmoothLevel() * 0.5 elif self.speed < 12.5: #", "return (self.bc / self.BC_MAX_EB) * -10 + 490 + 10 # 実際のブレーキ管圧力を便宜上のブレーキシリンダ圧力値から求める #", "= 0 def getSpeed(self): return self.speed def setWay(self, way): self.way = way def", "方向 0は切 self.way = 0 def getSmoothLevel(self): # y = log2(x+1) 最大が1 return", "== 0: self.eb = False # 0.1秒あたりのブレーキ作用・寛解 ここは実物に則さない # 目標ブレーキシリンダ圧力を設定すると、ゆっくりとそこに向けて動いていく # bc: 減速度(m/s2)とする。ここも実物に則さない", "if self.bc < 0.1: self.bc = 0.1 elif self.bc > self.BC_MAX_EB: self.bc =", "def setMascon(self, mascon_level): if not self.eb: self.mascon_level = mascon_level else: self.mascon_level = 0", "のブレーキレベルを入力 def setBrake(self, brake_level): self.brake_level = brake_level # ブレーキ装置の状態(非常、ブレーキ、ユルメ…)を入力 def setBrakeStatus(self, brake_status): self.brake_status", "self.BC_MAX_EB # 加減速計算 self.speed = self.speed + (accel * 1.3 - self.bc /", "< 12.5: # 45kph accel = self.getSmoothLevel() * 0.333 elif self.speed < 23.5:", "#coding:utf-8 import math from Brake import BrakeStatues # 日本国有鉄道DE10液体式ディーゼル機関車の動作を再現するライブラリ class DE10: def __init__(self):", "停車で復位 if self.speed == 0: self.eb = False # 0.1秒あたりのブレーキ作用・寛解 ここは実物に則さない # 目標ブレーキシリンダ圧力を設定すると、ゆっくりとそこに向けて動いていく", "mascon_level else: self.mascon_level = 0 # 0(運転) ~ 1(全ブレーキ) のブレーキレベルを入力 def setBrake(self, brake_level):", "ブレーキ装置状態 self.brake_status = BrakeStatues.FIX # 0(運転) - 1(全ブレーキ) のブレーキレベル self.brake_level = 0 #", "= self.bc elif self.bc > self.goal_bc: self.bc -= (self.bc - self.goal_bc) / 5.0", "self.BC_MAX # 方向 0は切 self.way = 0 def getSmoothLevel(self): # y = log2(x+1)", "+ (accel * 1.3 - self.bc / 1.5) * 0.1 * self.freight if", "self.BC_MAX elif self.brake_status == BrakeStatues.BRAKE: self.goal_bc = round(self.BC_MAX * self.brake_level, 2) elif self.brake_status", "self.eb = False # 客貨車牽引時の加速度減少(単機: 1) self.freight = 1 # 目標ブレーキシリンダ圧力 self.goal_bc =", "# 0 ~ 14のマスコンノッチを入力 EB時は力行不可 def setMascon(self, mascon_level): if not self.eb: self.mascon_level =", "# 非常ブレーキ状態 self.eb = False # 客貨車牽引時の加速度減少(単機: 1) self.freight = 1 # 目標ブレーキシリンダ圧力", "elif self.bc > self.BC_MAX_EB: self.bc = self.BC_MAX_EB # 加減速計算 self.speed = self.speed +", "# 客貨車牽引時の加速度減少(単機: 1) self.freight = 1 # 目標ブレーキシリンダ圧力 self.goal_bc = self.BC_MAX # 方向", "0: print('空吹かし') accel = 0 # ブレーキ装置状態から目標ブレーキシリンダ圧を求める if self.brake_status in (BrakeStatues.ERROR_SENSOR, BrakeStatues.ERROR, BrakeStatues.EMER):", "def advanceTime(self): # 加速度を求める(m/s2) if self.speed < 3.33: # 12kph accel = self.getSmoothLevel()", "self.brake_level = brake_level # ブレーキ装置の状態(非常、ブレーキ、ユルメ…)を入力 def setBrakeStatus(self, brake_status): self.brake_status = brake_status def getBp(self):", "# 25kph accel = self.getSmoothLevel() * 0.5 elif self.speed < 12.5: # 45kph", "-= (self.bc - self.goal_bc) / 5.0 elif self.bc < self.goal_bc: self.bc += (self.goal_bc", "self.goal_bc = self.BC_MAX elif self.brake_status == BrakeStatues.BRAKE: self.goal_bc = round(self.BC_MAX * self.brake_level, 2)", "# 日本国有鉄道DE10液体式ディーゼル機関車の動作を再現するライブラリ class DE10: def __init__(self): # 車速(m/s) self.speed = 0 # マスコンノッチ(0-14)", "elif self.speed < 6.94: # 25kph accel = self.getSmoothLevel() * 0.5 elif self.speed", "not self.eb: self.mascon_level = mascon_level else: self.mascon_level = 0 # 0(運転) ~ 1(全ブレーキ)", "5.0 # 丸める self.bc = round(self.bc, 2) # 走行抵抗 if self.bc < 0.1:", "* 0.194 # 最高速度では加速は0になる else: accel = 0 # 切位置時は空吹かしになって加速はしない if self.getWay() ==", "= False # 客貨車牽引時の加速度減少(単機: 1) self.freight = 1 # 目標ブレーキシリンダ圧力 self.goal_bc = self.BC_MAX", "490 + 10 # 実際のブレーキ管圧力を便宜上のブレーキシリンダ圧力値から求める # ブレーキ管圧力は通常490kPa 140kPa減圧して350kPaになると最大がかかる 
def getBc(self): return (self.bc /", "= 0 # マスコンノッチ(0-14) self.mascon_level = 0 # 非常ブレーキシリンダ圧力 self.BC_MAX_EB = 3.5 #", "= True elif self.brake_status in (BrakeStatues.FIX, BrakeStatues.MAX_BRAKE): self.goal_bc = self.BC_MAX elif self.brake_status ==", "* self.freight if self.speed < 0: self.speed = 0 def getSpeed(self): return self.speed", "0 ~ 14のマスコンノッチを入力 EB時は力行不可 def setMascon(self, mascon_level): if not self.eb: self.mascon_level = mascon_level", "True elif self.brake_status in (BrakeStatues.FIX, BrakeStatues.MAX_BRAKE): self.goal_bc = self.BC_MAX elif self.brake_status == BrakeStatues.BRAKE:", "elif self.speed < 23.5: # 84.6kph accel = self.getSmoothLevel() * 0.194 # 最高速度では加速は0になる", "self.bc elif self.bc > self.goal_bc: self.bc -= (self.bc - self.goal_bc) / 5.0 elif", "14のマスコンノッチを入力 EB時は力行不可 def setMascon(self, mascon_level): if not self.eb: self.mascon_level = mascon_level else: self.mascon_level", "丸める self.bc = round(self.bc, 2) # 走行抵抗 if self.bc < 0.1: self.bc =", "== BrakeStatues.BRAKE: self.goal_bc = round(self.BC_MAX * self.brake_level, 2) elif self.brake_status == BrakeStatues.RUN: self.goal_bc", "- 1(全ブレーキ) のブレーキレベル self.brake_level = 0 # 非常ブレーキ状態 self.eb = False # 客貨車牽引時の加速度減少(単機:", "# ブレーキ装置の状態(非常、ブレーキ、ユルメ…)を入力 def setBrakeStatus(self, brake_status): self.brake_status = brake_status def getBp(self): # 490から始まって、BCが増えるごとに減る return", "if self.speed == 0: self.eb = False # 0.1秒あたりのブレーキ作用・寛解 ここは実物に則さない # 目標ブレーキシリンダ圧力を設定すると、ゆっくりとそこに向けて動いていく #", "elif self.brake_status == BrakeStatues.RUN: self.goal_bc = 0.0 elif self.brake_status == BrakeStatues.LOWER_BRAKE: self.goal_bc =", "if self.eb: self.goal_bc = self.BC_MAX_EB self.setMascon(0) # 停車で復位 if self.speed == 0: self.eb", "= mascon_level else: self.mascon_level = 0 # 0(運転) ~ 1(全ブレーキ) のブレーキレベルを入力 def setBrake(self,", "* 0.1 * self.freight if self.speed < 0: self.speed = 0 def getSpeed(self):", "self.speed = 0 def getSpeed(self): return self.speed def setWay(self, way): self.way = way", "import math from Brake import BrakeStatues # 日本国有鉄道DE10液体式ディーゼル機関車の動作を再現するライブラリ class DE10: def __init__(self): #", "BrakeStatues.FIX # 0(運転) - 1(全ブレーキ) のブレーキレベル self.brake_level = 0 # 非常ブレーキ状態 self.eb =", "# 0(運転) - 1(全ブレーキ) のブレーキレベル self.brake_level = 0 # 非常ブレーキ状態 self.eb = False", "False # 0.1秒あたりのブレーキ作用・寛解 ここは実物に則さない # 目標ブレーキシリンダ圧力を設定すると、ゆっくりとそこに向けて動いていく # bc: 減速度(m/s2)とする。ここも実物に則さない if abs(self.bc - self.goal_bc)", "0 # 非常ブレーキシリンダ圧力 self.BC_MAX_EB = 3.5 # 常用最大ブレーキシリンダ圧力 本物は5.7kg/cm2 self.BC_MAX = 3.0 #", "ここは実物に則さない # 目標ブレーキシリンダ圧力を設定すると、ゆっくりとそこに向けて動いていく # bc: 減速度(m/s2)とする。ここも実物に則さない if abs(self.bc - self.goal_bc) < 0.1: self.goal_bc", "= brake_level # ブレーキ装置の状態(非常、ブレーキ、ユルメ…)を入力 def setBrakeStatus(self, brake_status): self.brake_status = brake_status def getBp(self): #", "self.speed < 12.5: # 45kph accel = self.getSmoothLevel() * 0.333 elif self.speed <", "self.way # 0 ~ 14のマスコンノッチを入力 EB時は力行不可 def setMascon(self, mascon_level): if not self.eb: self.mascon_level", "False # 客貨車牽引時の加速度減少(単機: 1) self.freight = 1 # 目標ブレーキシリンダ圧力 self.goal_bc = self.BC_MAX #", "0.5 elif self.speed < 12.5: # 45kph accel = self.getSmoothLevel() * 0.333 elif", "* -10 + 490 + 10 # 実際のブレーキ管圧力を便宜上のブレーキシリンダ圧力値から求める # ブレーキ管圧力は通常490kPa 140kPa減圧して350kPaになると最大がかかる def getBc(self):", "車速(m/s) self.speed = 0 # マスコンノッチ(0-14) self.mascon_level = 0 # 非常ブレーキシリンダ圧力 self.BC_MAX_EB =", "self.goal_bc = self.BC_MAX_EB self.setMascon(0) # 停車で復位 if self.speed == 0: self.eb = False", "# マスコンノッチ(0-14) self.mascon_level = 0 # 非常ブレーキシリンダ圧力 self.BC_MAX_EB = 3.5 # 常用最大ブレーキシリンダ圧力 本物は5.7kg/cm2", 
"走行抵抗 if self.bc < 0.1: self.bc = 0.1 elif self.bc > self.BC_MAX_EB: self.bc", "BrakeStatues.BRAKE: self.goal_bc = round(self.BC_MAX * self.brake_level, 2) elif self.brake_status == BrakeStatues.RUN: self.goal_bc =", "0.0 elif self.brake_status == BrakeStatues.LOWER_BRAKE: self.goal_bc = 0.0 # 非常ブレーキ if self.eb: self.goal_bc", "2) elif self.brake_status == BrakeStatues.RUN: self.goal_bc = 0.0 elif self.brake_status == BrakeStatues.LOWER_BRAKE: self.goal_bc", "round(self.bc, 2) # 走行抵抗 if self.bc < 0.1: self.bc = 0.1 elif self.bc", "def getWay(self): return self.way # 0 ~ 14のマスコンノッチを入力 EB時は力行不可 def setMascon(self, mascon_level): if", "~ 1(全ブレーキ) のブレーキレベルを入力 def setBrake(self, brake_level): self.brake_level = brake_level # ブレーキ装置の状態(非常、ブレーキ、ユルメ…)を入力 def setBrakeStatus(self,", "if self.getWay() == 0: print('空吹かし') accel = 0 # ブレーキ装置状態から目標ブレーキシリンダ圧を求める if self.brake_status in", "self.bc) / 5.0 # 丸める self.bc = round(self.bc, 2) # 走行抵抗 if self.bc", "self.brake_status in (BrakeStatues.ERROR_SENSOR, BrakeStatues.ERROR, BrakeStatues.EMER): self.eb = True elif self.brake_status in (BrakeStatues.FIX, BrakeStatues.MAX_BRAKE):", "(BrakeStatues.FIX, BrakeStatues.MAX_BRAKE): self.goal_bc = self.BC_MAX elif self.brake_status == BrakeStatues.BRAKE: self.goal_bc = round(self.BC_MAX *", "in (BrakeStatues.FIX, BrakeStatues.MAX_BRAKE): self.goal_bc = self.BC_MAX elif self.brake_status == BrakeStatues.BRAKE: self.goal_bc = round(self.BC_MAX", "5.0 elif self.bc < self.goal_bc: self.bc += (self.goal_bc - self.bc) / 5.0 #", "(self.goal_bc - self.bc) / 5.0 # 丸める self.bc = round(self.bc, 2) # 走行抵抗", "0 # ブレーキ装置状態から目標ブレーキシリンダ圧を求める if self.brake_status in (BrakeStatues.ERROR_SENSOR, BrakeStatues.ERROR, BrakeStatues.EMER): self.eb = True elif", "* self.brake_level, 2) elif self.brake_status == BrakeStatues.RUN: self.goal_bc = 0.0 elif self.brake_status ==", "self.speed + (accel * 1.3 - self.bc / 1.5) * 0.1 * self.freight", "< 0.1: self.bc = 0.1 elif self.bc > self.BC_MAX_EB: self.bc = self.BC_MAX_EB #", "accel = self.getSmoothLevel() * 0.803 elif self.speed < 6.94: # 25kph accel =", "/ 5.0 # 丸める self.bc = round(self.bc, 2) # 走行抵抗 if self.bc <", "setWay(self, way): self.way = way def getWay(self): return self.way # 0 ~ 14のマスコンノッチを入力", "in (BrakeStatues.ERROR_SENSOR, BrakeStatues.ERROR, BrakeStatues.EMER): self.eb = True elif self.brake_status in (BrakeStatues.FIX, BrakeStatues.MAX_BRAKE): self.goal_bc", "accel = 0 # 切位置時は空吹かしになって加速はしない if self.getWay() == 0: print('空吹かし') accel = 0", "# 490から始まって、BCが増えるごとに減る return (self.bc / self.BC_MAX_EB) * -10 + 490 + 10 #", "# ブレーキ装置状態から目標ブレーキシリンダ圧を求める if self.brake_status in (BrakeStatues.ERROR_SENSOR, BrakeStatues.ERROR, BrakeStatues.EMER): self.eb = True elif self.brake_status", "= 0.1 elif self.bc > self.BC_MAX_EB: self.bc = self.BC_MAX_EB # 加減速計算 self.speed =", "self.brake_status == BrakeStatues.RUN: self.goal_bc = 0.0 elif self.brake_status == BrakeStatues.LOWER_BRAKE: self.goal_bc = 0.0", "round(self.BC_MAX * self.brake_level, 2) elif self.brake_status == BrakeStatues.RUN: self.goal_bc = 0.0 elif self.brake_status", "setBrakeStatus(self, brake_status): self.brake_status = brake_status def getBp(self): # 490から始まって、BCが増えるごとに減る return (self.bc / self.BC_MAX_EB)", "(accel * 1.3 - self.bc / 1.5) * 0.1 * self.freight if self.speed", "self.speed == 0: self.eb = False # 0.1秒あたりのブレーキ作用・寛解 ここは実物に則さない # 目標ブレーキシリンダ圧力を設定すると、ゆっくりとそこに向けて動いていく # bc:", "0: self.eb = False # 0.1秒あたりのブレーキ作用・寛解 ここは実物に則さない # 目標ブレーキシリンダ圧力を設定すると、ゆっくりとそこに向けて動いていく # bc: 減速度(m/s2)とする。ここも実物に則さない if", "# 加速度を求める(m/s2) 
if self.speed < 3.33: # 12kph accel = self.getSmoothLevel() * 0.803", "< 0.1: self.goal_bc = self.bc elif self.bc > self.goal_bc: self.bc -= (self.bc -", "elif self.brake_status == BrakeStatues.LOWER_BRAKE: self.goal_bc = 0.0 # 非常ブレーキ if self.eb: self.goal_bc =", "setBrake(self, brake_level): self.brake_level = brake_level # ブレーキ装置の状態(非常、ブレーキ、ユルメ…)を入力 def setBrakeStatus(self, brake_status): self.brake_status = brake_status", "減速度(m/s2)とする。ここも実物に則さない if abs(self.bc - self.goal_bc) < 0.1: self.goal_bc = self.bc elif self.bc >", "self.BC_MAX_EB = 3.5 # 常用最大ブレーキシリンダ圧力 本物は5.7kg/cm2 self.BC_MAX = 3.0 # ブレーキシリンダ圧力(減速度) self.bc =", "self.speed < 23.5: # 84.6kph accel = self.getSmoothLevel() * 0.194 # 最高速度では加速は0になる else:", "BrakeStatues # 日本国有鉄道DE10液体式ディーゼル機関車の動作を再現するライブラリ class DE10: def __init__(self): # 車速(m/s) self.speed = 0 #", "log2(x+1) 最大が1 return (math.log2(self.mascon_level+1))/4.0 # 0.1秒進める def advanceTime(self): # 加速度を求める(m/s2) if self.speed <", "# 切位置時は空吹かしになって加速はしない if self.getWay() == 0: print('空吹かし') accel = 0 # ブレーキ装置状態から目標ブレーキシリンダ圧を求める if", "0.1 * self.freight if self.speed < 0: self.speed = 0 def getSpeed(self): return", "= self.BC_MAX # ブレーキ装置状態 self.brake_status = BrakeStatues.FIX # 0(運転) - 1(全ブレーキ) のブレーキレベル self.brake_level", "self.bc -= (self.bc - self.goal_bc) / 5.0 elif self.bc < self.goal_bc: self.bc +=", "最高速度では加速は0になる else: accel = 0 # 切位置時は空吹かしになって加速はしない if self.getWay() == 0: print('空吹かし') accel", "self.bc = 0.1 elif self.bc > self.BC_MAX_EB: self.bc = self.BC_MAX_EB # 加減速計算 self.speed", "def getSmoothLevel(self): # y = log2(x+1) 最大が1 return (math.log2(self.mascon_level+1))/4.0 # 0.1秒進める def advanceTime(self):", "/ self.BC_MAX_EB) * -10 + 490 + 10 # 実際のブレーキ管圧力を便宜上のブレーキシリンダ圧力値から求める # ブレーキ管圧力は通常490kPa 140kPa減圧して350kPaになると最大がかかる", "self.brake_level = 0 # 非常ブレーキ状態 self.eb = False # 客貨車牽引時の加速度減少(単機: 1) self.freight =", "accel = self.getSmoothLevel() * 0.333 elif self.speed < 23.5: # 84.6kph accel =", "brake_level # ブレーキ装置の状態(非常、ブレーキ、ユルメ…)を入力 def setBrakeStatus(self, brake_status): self.brake_status = brake_status def getBp(self): # 490から始まって、BCが増えるごとに減る", "# y = log2(x+1) 最大が1 return (math.log2(self.mascon_level+1))/4.0 # 0.1秒進める def advanceTime(self): # 加速度を求める(m/s2)", "Brake import BrakeStatues # 日本国有鉄道DE10液体式ディーゼル機関車の動作を再現するライブラリ class DE10: def __init__(self): # 車速(m/s) self.speed =", "# 車速(m/s) self.speed = 0 # マスコンノッチ(0-14) self.mascon_level = 0 # 非常ブレーキシリンダ圧力 self.BC_MAX_EB", "self.getSmoothLevel() * 0.5 elif self.speed < 12.5: # 45kph accel = self.getSmoothLevel() *", "/ 1.5) * 0.1 * self.freight if self.speed < 0: self.speed = 0", "# 84.6kph accel = self.getSmoothLevel() * 0.194 # 最高速度では加速は0になる else: accel = 0", "(BrakeStatues.ERROR_SENSOR, BrakeStatues.ERROR, BrakeStatues.EMER): self.eb = True elif self.brake_status in (BrakeStatues.FIX, BrakeStatues.MAX_BRAKE): self.goal_bc =", "way def getWay(self): return self.way # 0 ~ 14のマスコンノッチを入力 EB時は力行不可 def setMascon(self, mascon_level):", "0 # マスコンノッチ(0-14) self.mascon_level = 0 # 非常ブレーキシリンダ圧力 self.BC_MAX_EB = 3.5 # 常用最大ブレーキシリンダ圧力", "- self.bc) / 5.0 # 丸める self.bc = round(self.bc, 2) # 走行抵抗 if", "self.BC_MAX # ブレーキ装置状態 self.brake_status = BrakeStatues.FIX # 0(運転) - 1(全ブレーキ) のブレーキレベル self.brake_level =", "非常ブレーキシリンダ圧力 self.BC_MAX_EB = 3.5 # 常用最大ブレーキシリンダ圧力 本物は5.7kg/cm2 self.BC_MAX = 3.0 # ブレーキシリンダ圧力(減速度) self.bc", "0(運転) ~ 1(全ブレーキ) のブレーキレベルを入力 def setBrake(self, brake_level): self.brake_level = brake_level # ブレーキ装置の状態(非常、ブレーキ、ユルメ…)を入力 def", "self.setMascon(0) # 停車で復位 if self.speed == 0: self.eb = False # 0.1秒あたりのブレーキ作用・寛解 
ここは実物に則さない", "== 0: print('空吹かし') accel = 0 # ブレーキ装置状態から目標ブレーキシリンダ圧を求める if self.brake_status in (BrakeStatues.ERROR_SENSOR, BrakeStatues.ERROR,", "= 3.0 # ブレーキシリンダ圧力(減速度) self.bc = self.BC_MAX # ブレーキ装置状態 self.brake_status = BrakeStatues.FIX #", "return self.way # 0 ~ 14のマスコンノッチを入力 EB時は力行不可 def setMascon(self, mascon_level): if not self.eb:", "ブレーキシリンダ圧力(減速度) self.bc = self.BC_MAX # ブレーキ装置状態 self.brake_status = BrakeStatues.FIX # 0(運転) - 1(全ブレーキ)", "self.goal_bc: self.bc += (self.goal_bc - self.bc) / 5.0 # 丸める self.bc = round(self.bc,", "self.getSmoothLevel() * 0.803 elif self.speed < 6.94: # 25kph accel = self.getSmoothLevel() *", "self.brake_status = BrakeStatues.FIX # 0(運転) - 1(全ブレーキ) のブレーキレベル self.brake_level = 0 # 非常ブレーキ状態", "self.goal_bc = round(self.BC_MAX * self.brake_level, 2) elif self.brake_status == BrakeStatues.RUN: self.goal_bc = 0.0", "# bc: 減速度(m/s2)とする。ここも実物に則さない if abs(self.bc - self.goal_bc) < 0.1: self.goal_bc = self.bc elif", "= round(self.bc, 2) # 走行抵抗 if self.bc < 0.1: self.bc = 0.1 elif", "= self.getSmoothLevel() * 0.803 elif self.speed < 6.94: # 25kph accel = self.getSmoothLevel()", "23.5: # 84.6kph accel = self.getSmoothLevel() * 0.194 # 最高速度では加速は0になる else: accel =", "0(運転) - 1(全ブレーキ) のブレーキレベル self.brake_level = 0 # 非常ブレーキ状態 self.eb = False #", "0.1秒進める def advanceTime(self): # 加速度を求める(m/s2) if self.speed < 3.33: # 12kph accel =", "self.getSmoothLevel() * 0.194 # 最高速度では加速は0になる else: accel = 0 # 切位置時は空吹かしになって加速はしない if self.getWay()", "# 非常ブレーキシリンダ圧力 self.BC_MAX_EB = 3.5 # 常用最大ブレーキシリンダ圧力 本物は5.7kg/cm2 self.BC_MAX = 3.0 # ブレーキシリンダ圧力(減速度)", "3.33: # 12kph accel = self.getSmoothLevel() * 0.803 elif self.speed < 6.94: #", "self.brake_status = brake_status def getBp(self): # 490から始まって、BCが増えるごとに減る return (self.bc / self.BC_MAX_EB) * -10", "self.BC_MAX_EB) * -10 + 490 + 10 # 実際のブレーキ管圧力を便宜上のブレーキシリンダ圧力値から求める # ブレーキ管圧力は通常490kPa 140kPa減圧して350kPaになると最大がかかる def", "加速度を求める(m/s2) if self.speed < 3.33: # 12kph accel = self.getSmoothLevel() * 0.803 elif", "切位置時は空吹かしになって加速はしない if self.getWay() == 0: print('空吹かし') accel = 0 # ブレーキ装置状態から目標ブレーキシリンダ圧を求める if self.brake_status", "= 0 # 0(運転) ~ 1(全ブレーキ) のブレーキレベルを入力 def setBrake(self, brake_level): self.brake_level = brake_level", "# 0.1秒あたりのブレーキ作用・寛解 ここは実物に則さない # 目標ブレーキシリンダ圧力を設定すると、ゆっくりとそこに向けて動いていく # bc: 減速度(m/s2)とする。ここも実物に則さない if abs(self.bc - self.goal_bc) <", "getBp(self): # 490から始まって、BCが増えるごとに減る return (self.bc / self.BC_MAX_EB) * -10 + 490 + 10", "0 # 非常ブレーキ状態 self.eb = False # 客貨車牽引時の加速度減少(単機: 1) self.freight = 1 #", "# 最高速度では加速は0になる else: accel = 0 # 切位置時は空吹かしになって加速はしない if self.getWay() == 0: print('空吹かし')", "abs(self.bc - self.goal_bc) < 0.1: self.goal_bc = self.bc elif self.bc > self.goal_bc: self.bc", "# 目標ブレーキシリンダ圧力を設定すると、ゆっくりとそこに向けて動いていく # bc: 減速度(m/s2)とする。ここも実物に則さない if abs(self.bc - self.goal_bc) < 0.1: self.goal_bc =", "= self.getSmoothLevel() * 0.333 elif self.speed < 23.5: # 84.6kph accel = self.getSmoothLevel()", "= 0 # 非常ブレーキ状態 self.eb = False # 客貨車牽引時の加速度減少(単機: 1) self.freight = 1", "elif self.brake_status == BrakeStatues.BRAKE: self.goal_bc = round(self.BC_MAX * self.brake_level, 2) elif self.brake_status ==", "マスコンノッチ(0-14) self.mascon_level = 0 # 非常ブレーキシリンダ圧力 self.BC_MAX_EB = 3.5 # 常用最大ブレーキシリンダ圧力 本物は5.7kg/cm2 self.BC_MAX", "0.803 elif self.speed < 6.94: # 25kph accel = self.getSmoothLevel() * 0.5 elif", "- self.bc / 1.5) * 0.1 * self.freight if self.speed < 0: self.speed", "ブレーキ装置状態から目標ブレーキシリンダ圧を求める if self.brake_status in (BrakeStatues.ERROR_SENSOR, BrakeStatues.ERROR, BrakeStatues.EMER): self.eb = True elif 
self.brake_status in", "のブレーキレベル self.brake_level = 0 # 非常ブレーキ状態 self.eb = False # 客貨車牽引時の加速度減少(単機: 1) self.freight", "print('空吹かし') accel = 0 # ブレーキ装置状態から目標ブレーキシリンダ圧を求める if self.brake_status in (BrakeStatues.ERROR_SENSOR, BrakeStatues.ERROR, BrakeStatues.EMER): self.eb", "self.mascon_level = mascon_level else: self.mascon_level = 0 # 0(運転) ~ 1(全ブレーキ) のブレーキレベルを入力 def", "self.speed < 3.33: # 12kph accel = self.getSmoothLevel() * 0.803 elif self.speed <", "self.bc = round(self.bc, 2) # 走行抵抗 if self.bc < 0.1: self.bc = 0.1", "self.bc / 1.5) * 0.1 * self.freight if self.speed < 0: self.speed =", "非常ブレーキ状態 self.eb = False # 客貨車牽引時の加速度減少(単機: 1) self.freight = 1 # 目標ブレーキシリンダ圧力 self.goal_bc", "accel = self.getSmoothLevel() * 0.5 elif self.speed < 12.5: # 45kph accel =", "self.goal_bc: self.bc -= (self.bc - self.goal_bc) / 5.0 elif self.bc < self.goal_bc: self.bc", "# 加減速計算 self.speed = self.speed + (accel * 1.3 - self.bc / 1.5)", "mascon_level): if not self.eb: self.mascon_level = mascon_level else: self.mascon_level = 0 # 0(運転)", "45kph accel = self.getSmoothLevel() * 0.333 elif self.speed < 23.5: # 84.6kph accel", "self.BC_MAX_EB self.setMascon(0) # 停車で復位 if self.speed == 0: self.eb = False # 0.1秒あたりのブレーキ作用・寛解", "getSmoothLevel(self): # y = log2(x+1) 最大が1 return (math.log2(self.mascon_level+1))/4.0 # 0.1秒進める def advanceTime(self): #", "bc: 減速度(m/s2)とする。ここも実物に則さない if abs(self.bc - self.goal_bc) < 0.1: self.goal_bc = self.bc elif self.bc", "# 12kph accel = self.getSmoothLevel() * 0.803 elif self.speed < 6.94: # 25kph", "way): self.way = way def getWay(self): return self.way # 0 ~ 14のマスコンノッチを入力 EB時は力行不可" ]
[ "plt.figure(figsize=(13, 8)) plt.plot(surface(y), y) # mirror surface visualization plt.plot([-2 * radius, 0], [0,", "incident ray with the mirror def height(inc_angle): phi = ref_angle(inc_angle) + epsilon(inc_angle) return", "return radius - q # the y-coordinate of the intersection of the incident", "np.sin(ref_angle(inc_angle)) return radius - q # the y-coordinate of the intersection of the", "= {:.1f} mm. Focal length = {:.1f} mm. Source position = {:.1f} mm.\\nMaximum", "* np.sin(phi) # line equation for extension of the reflected ray def line(inc_angle,", "focal length of the mirror y = np.linspace(-radius, radius, 1000) # mirror equation", "the line connecting the point of incidence # of the ray on the", "abs(z_0) > abs(2 * radius): z_ref = np.array([surface(h), z_0]) y_ref = np.array([h, 0])", "radius of the mirror in mm (must be positive) angle_d = 30 #", "height(inc_angle) z_inc = np.array([-source_pos, surface(h)]) y_inc = np.array([0, h]) plt.plot(z_inc, y_inc, 'k', lw=1)", "ang * np.pi / 180 h = height(inc_angle) z_inc = np.array([-source_pos, surface(h)]) y_inc", "z_0 > 0 else -z_0 z_ref = np.array([surface(h), -2 * radius]) y_ref =", "incidence of the incident beam in degrees num_rays = 21 # number of", "= {:.1f} mm. Source position = {:.1f} mm.\\nMaximum incident angle = {:.1f} deg.", "= {:.1f} mm.\\nMaximum incident angle = {:.1f} deg. Number of rays = {}\".format(radius,", "source position in mm (must be positive) focal_length = radius / 2 #", "of the mirror def epsilon(inc_angle): q = radius - source_pos return np.arcsin(q /", "if z_0 > 0 else z_0 else: z_0 = z_0 if z_0 >", "lw=1) # draw incident beam z_0 = ref_z(inc_angle) if np.isnan(z_0): z_0 = -2", "- 2 * epsilon(inc_angle) # the z-coordinate of the intersection of the reflected", "equation for extension of the reflected ray def line(inc_angle, z, z0): return np.tan(inc_angle)", "abs(source_pos) > abs(focal_length) and abs(z_0) > abs(2 * radius): z_ref = np.array([surface(h), z_0])", "angle_d, num_rays): inc_angle = ang * np.pi / 180 h = height(inc_angle) z_inc", "mirror equation z = sqrt(R^2 - y^2) - R def surface(y): return np.sqrt(radius", "# mirror equation z = sqrt(R^2 - y^2) - R def surface(y): return", "visualization plt.plot([-2 * radius, 0], [0, 0]) # axis of the mirror plt.plot([-focal_length],", "> abs(focal_length) and abs(z_0) > abs(2 * radius): z_ref = np.array([surface(h), z_0]) y_ref", "ray on the mirror and the center of curvature of the mirror def", "plt.plot(surface(y), y) # mirror surface visualization plt.plot([-2 * radius, 0], [0, 0]) #", "z, z0): return np.tan(inc_angle) * (z - z0) plt.figure(figsize=(13, 8)) plt.plot(surface(y), y) #", "the ray on the mirror and the center of curvature of the mirror", "plt import numpy as np from numpy.lib.function_base import angle radius = 100 #", "point for ang in np.linspace(-angle_d, angle_d, num_rays): inc_angle = ang * np.pi /", "line equation for extension of the reflected ray def line(inc_angle, z, z0): return", "radius * np.sin(phi) # line equation for extension of the reflected ray def", "of the reflected ray with the axis def ref_z(inc_angle): q = radius *", "np.sin(inc_angle)) # angle of reflected ray def ref_angle(inc_angle): return inc_angle - 2 *", "import angle radius = 100 # curvature radius of the mirror in mm", "connecting the point of incidence # of the ray on the mirror and", "2 - y ** 2) - radius # angle between the incident ray", "# angle between the incident ray and the line connecting the point of", "0]) # axis of the mirror 
plt.plot([-focal_length], [0], 'o') # focal point for", "epsilon(inc_angle): q = radius - source_pos return np.arcsin(q / radius * np.sin(inc_angle)) #", "focal_length = radius / 2 # focal length of the mirror y =", "mm. Focal length = {:.1f} mm. Source position = {:.1f} mm.\\nMaximum incident angle", "mirror plt.plot([-focal_length], [0], 'o') # focal point for ang in np.linspace(-angle_d, angle_d, num_rays):", "* focal_length) and abs(source_pos) > abs(focal_length) and abs(z_0) > abs(2 * radius): z_ref", "lw=1) plt.title(\"Radius = {:.1f} mm. Focal length = {:.1f} mm. Source position =", "the mirror plt.plot([-focal_length], [0], 'o') # focal point for ang in np.linspace(-angle_d, angle_d,", "z_0 = -2 * radius if source_pos >= focal_length: z_0 = -z_0 if", "import matplotlib.pyplot as plt import numpy as np from numpy.lib.function_base import angle radius", "ang in np.linspace(-angle_d, angle_d, num_rays): inc_angle = ang * np.pi / 180 h", "'k', lw=1) # draw incident beam z_0 = ref_z(inc_angle) if np.isnan(z_0): z_0 =", "return inc_angle - 2 * epsilon(inc_angle) # the z-coordinate of the intersection of", "{:.1f} deg. Number of rays = {}\".format(radius, focal_length, -source_pos, angle_d, num_rays)) plt.xlabel(\"z, mm\")", "Number of rays = {}\".format(radius, focal_length, -source_pos, angle_d, num_rays)) plt.xlabel(\"z, mm\") plt.ylabel(\"r, mm\")", "= sqrt(R^2 - y^2) - R def surface(y): return np.sqrt(radius ** 2 -", "incident beam in degrees num_rays = 21 # number of rays source_pos =", "radius, 1000) # mirror equation z = sqrt(R^2 - y^2) - R def", "z_0)]) if abs(source_pos) < abs(2 * focal_length) and abs(source_pos) > abs(focal_length) and abs(z_0)", "beam in degrees num_rays = 21 # number of rays source_pos = 80", "the z-coordinate of the intersection of the reflected ray with the axis def", "np.array([surface(h), -2 * radius]) y_ref = np.array([h, line(ref_angle(inc_angle), -2 * radius, z_0)]) if", "# the z-coordinate of the intersection of the reflected ray with the axis", "equation z = sqrt(R^2 - y^2) - R def surface(y): return np.sqrt(radius **", "of the incident ray with the mirror def height(inc_angle): phi = ref_angle(inc_angle) +", "y_inc, 'k', lw=1) # draw incident beam z_0 = ref_z(inc_angle) if np.isnan(z_0): z_0", "inc_angle - 2 * epsilon(inc_angle) # the z-coordinate of the intersection of the", "* np.pi / 180 h = height(inc_angle) z_inc = np.array([-source_pos, surface(h)]) y_inc =", "return np.sqrt(radius ** 2 - y ** 2) - radius # angle between", "= {:.1f} deg. Number of rays = {}\".format(radius, focal_length, -source_pos, angle_d, num_rays)) plt.xlabel(\"z,", "angle_d, num_rays)) plt.xlabel(\"z, mm\") plt.ylabel(\"r, mm\") plt.ylim(-radius, radius) plt.xlim(-2 * radius, 0) plt.grid()", "intersection of the reflected ray with the axis def ref_z(inc_angle): q = radius", "radius # angle between the incident ray and the line connecting the point", "mm.\\nMaximum incident angle = {:.1f} deg. 
Number of rays = {}\".format(radius, focal_length, -source_pos,", "radius - q # the y-coordinate of the intersection of the incident ray", "radius, z_0)]) if abs(source_pos) < abs(2 * focal_length) and abs(source_pos) > abs(focal_length) and", "radius, 0], [0, 0]) # axis of the mirror plt.plot([-focal_length], [0], 'o') #", "mirror in mm (must be positive) angle_d = 30 # maximum angle of", "= ang * np.pi / 180 h = height(inc_angle) z_inc = np.array([-source_pos, surface(h)])", "curvature of the mirror def epsilon(inc_angle): q = radius - source_pos return np.arcsin(q", "def ref_z(inc_angle): q = radius * np.sin(-epsilon(inc_angle)) / np.sin(ref_angle(inc_angle)) return radius - q", "the mirror and the center of curvature of the mirror def epsilon(inc_angle): q", "incident beam z_0 = ref_z(inc_angle) if np.isnan(z_0): z_0 = -2 * radius if", "plt.plot([-2 * radius, 0], [0, 0]) # axis of the mirror plt.plot([-focal_length], [0],", "from numpy.lib.function_base import angle radius = 100 # curvature radius of the mirror", "abs(2 * focal_length) and abs(source_pos) > abs(focal_length) and abs(z_0) > abs(2 * radius):", "line(inc_angle, z, z0): return np.tan(inc_angle) * (z - z0) plt.figure(figsize=(13, 8)) plt.plot(surface(y), y)", "be positive) angle_d = 30 # maximum angle of incidence of the incident", "= 30 # maximum angle of incidence of the incident beam in degrees", "in degrees num_rays = 21 # number of rays source_pos = 80 #", "of the reflected ray def line(inc_angle, z, z0): return np.tan(inc_angle) * (z -", "z_0 = -z_0 if z_0 > 0 else z_0 else: z_0 = z_0", "q = radius - source_pos return np.arcsin(q / radius * np.sin(inc_angle)) # angle", "= 21 # number of rays source_pos = 80 # source position in", "ray def ref_angle(inc_angle): return inc_angle - 2 * epsilon(inc_angle) # the z-coordinate of", "epsilon(inc_angle) return radius * np.sin(phi) # line equation for extension of the reflected", "def height(inc_angle): phi = ref_angle(inc_angle) + epsilon(inc_angle) return radius * np.sin(phi) # line", "np.arcsin(q / radius * np.sin(inc_angle)) # angle of reflected ray def ref_angle(inc_angle): return", "position in mm (must be positive) focal_length = radius / 2 # focal", "q = radius * np.sin(-epsilon(inc_angle)) / np.sin(ref_angle(inc_angle)) return radius - q # the", "reflected ray def ref_angle(inc_angle): return inc_angle - 2 * epsilon(inc_angle) # the z-coordinate", "ref_angle(inc_angle) + epsilon(inc_angle) return radius * np.sin(phi) # line equation for extension of", "in mm (must be positive) focal_length = radius / 2 # focal length", "the y-coordinate of the intersection of the incident ray with the mirror def", "1000) # mirror equation z = sqrt(R^2 - y^2) - R def surface(y):", "h = height(inc_angle) z_inc = np.array([-source_pos, surface(h)]) y_inc = np.array([0, h]) plt.plot(z_inc, y_inc,", "plt.plot([-focal_length], [0], 'o') # focal point for ang in np.linspace(-angle_d, angle_d, num_rays): inc_angle", "the intersection of the reflected ray with the axis def ref_z(inc_angle): q =", "and abs(source_pos) > abs(focal_length) and abs(z_0) > abs(2 * radius): z_ref = np.array([surface(h),", "z0): return np.tan(inc_angle) * (z - z0) plt.figure(figsize=(13, 8)) plt.plot(surface(y), y) # mirror", "* radius]) y_ref = np.array([h, line(ref_angle(inc_angle), -2 * radius, z_0)]) if abs(source_pos) <", "np.array([-source_pos, surface(h)]) y_inc = np.array([0, h]) plt.plot(z_inc, y_inc, 'k', lw=1) # draw incident", "> 0 else -z_0 z_ref = np.array([surface(h), -2 * radius]) y_ref = 
np.array([h,", "radius if source_pos >= focal_length: z_0 = -z_0 if z_0 > 0 else", "= radius * np.sin(-epsilon(inc_angle)) / np.sin(ref_angle(inc_angle)) return radius - q # the y-coordinate", "np.sin(-epsilon(inc_angle)) / np.sin(ref_angle(inc_angle)) return radius - q # the y-coordinate of the intersection", "axis def ref_z(inc_angle): q = radius * np.sin(-epsilon(inc_angle)) / np.sin(ref_angle(inc_angle)) return radius -", "z_0 = z_0 if z_0 > 0 else -z_0 z_ref = np.array([surface(h), -2", "focal_length: z_0 = -z_0 if z_0 > 0 else z_0 else: z_0 =", "the mirror in mm (must be positive) angle_d = 30 # maximum angle", "radius]) y_ref = np.array([h, line(ref_angle(inc_angle), -2 * radius, z_0)]) if abs(source_pos) < abs(2", "np.linspace(-radius, radius, 1000) # mirror equation z = sqrt(R^2 - y^2) - R", "else z_0 else: z_0 = z_0 if z_0 > 0 else -z_0 z_ref", "angle_d = 30 # maximum angle of incidence of the incident beam in", "np from numpy.lib.function_base import angle radius = 100 # curvature radius of the", "* np.sin(inc_angle)) # angle of reflected ray def ref_angle(inc_angle): return inc_angle - 2", "in np.linspace(-angle_d, angle_d, num_rays): inc_angle = ang * np.pi / 180 h =", "'r', lw=1) plt.title(\"Radius = {:.1f} mm. Focal length = {:.1f} mm. Source position", "21 # number of rays source_pos = 80 # source position in mm", "number of rays source_pos = 80 # source position in mm (must be", "np.array([h, 0]) plt.plot(z_ref, y_ref, 'r', lw=1) plt.title(\"Radius = {:.1f} mm. Focal length =", "and abs(z_0) > abs(2 * radius): z_ref = np.array([surface(h), z_0]) y_ref = np.array([h,", "(z - z0) plt.figure(figsize=(13, 8)) plt.plot(surface(y), y) # mirror surface visualization plt.plot([-2 *", "surface(h)]) y_inc = np.array([0, h]) plt.plot(z_inc, y_inc, 'k', lw=1) # draw incident beam", "np.pi / 180 h = height(inc_angle) z_inc = np.array([-source_pos, surface(h)]) y_inc = np.array([0,", "the mirror def height(inc_angle): phi = ref_angle(inc_angle) + epsilon(inc_angle) return radius * np.sin(phi)", "positive) focal_length = radius / 2 # focal length of the mirror y", "of the ray on the mirror and the center of curvature of the", "180 h = height(inc_angle) z_inc = np.array([-source_pos, surface(h)]) y_inc = np.array([0, h]) plt.plot(z_inc,", "# line equation for extension of the reflected ray def line(inc_angle, z, z0):", "length = {:.1f} mm. Source position = {:.1f} mm.\\nMaximum incident angle = {:.1f}", "of curvature of the mirror def epsilon(inc_angle): q = radius - source_pos return", "= radius / 2 # focal length of the mirror y = np.linspace(-radius,", "source_pos return np.arcsin(q / radius * np.sin(inc_angle)) # angle of reflected ray def", "-2 * radius, z_0)]) if abs(source_pos) < abs(2 * focal_length) and abs(source_pos) >", "as plt import numpy as np from numpy.lib.function_base import angle radius = 100", "num_rays)) plt.xlabel(\"z, mm\") plt.ylabel(\"r, mm\") plt.ylim(-radius, radius) plt.xlim(-2 * radius, 0) plt.grid() plt.show()", "the reflected ray with the axis def ref_z(inc_angle): q = radius * np.sin(-epsilon(inc_angle))", "in mm (must be positive) angle_d = 30 # maximum angle of incidence", "draw incident beam z_0 = ref_z(inc_angle) if np.isnan(z_0): z_0 = -2 * radius", "Focal length = {:.1f} mm. 
Source position = {:.1f} mm.\\nMaximum incident angle =", "maximum angle of incidence of the incident beam in degrees num_rays = 21", "z_0 if z_0 > 0 else -z_0 z_ref = np.array([surface(h), -2 * radius])", "extension of the reflected ray def line(inc_angle, z, z0): return np.tan(inc_angle) * (z", "if abs(source_pos) < abs(2 * focal_length) and abs(source_pos) > abs(focal_length) and abs(z_0) >", "np.tan(inc_angle) * (z - z0) plt.figure(figsize=(13, 8)) plt.plot(surface(y), y) # mirror surface visualization", "# axis of the mirror plt.plot([-focal_length], [0], 'o') # focal point for ang", "= height(inc_angle) z_inc = np.array([-source_pos, surface(h)]) y_inc = np.array([0, h]) plt.plot(z_inc, y_inc, 'k',", "z_0 = ref_z(inc_angle) if np.isnan(z_0): z_0 = -2 * radius if source_pos >=", "radius): z_ref = np.array([surface(h), z_0]) y_ref = np.array([h, 0]) plt.plot(z_ref, y_ref, 'r', lw=1)", "** 2 - y ** 2) - radius # angle between the incident", "80 # source position in mm (must be positive) focal_length = radius /", "axis of the mirror plt.plot([-focal_length], [0], 'o') # focal point for ang in", "y_ref, 'r', lw=1) plt.title(\"Radius = {:.1f} mm. Focal length = {:.1f} mm. Source", "surface(y): return np.sqrt(radius ** 2 - y ** 2) - radius # angle", "- y ** 2) - radius # angle between the incident ray and", "of the incident beam in degrees num_rays = 21 # number of rays", "np.isnan(z_0): z_0 = -2 * radius if source_pos >= focal_length: z_0 = -z_0", "def epsilon(inc_angle): q = radius - source_pos return np.arcsin(q / radius * np.sin(inc_angle))", "0]) plt.plot(z_ref, y_ref, 'r', lw=1) plt.title(\"Radius = {:.1f} mm. Focal length = {:.1f}", "z_0 > 0 else z_0 else: z_0 = z_0 if z_0 > 0", "# maximum angle of incidence of the incident beam in degrees num_rays =", "Source position = {:.1f} mm.\\nMaximum incident angle = {:.1f} deg. Number of rays", "R def surface(y): return np.sqrt(radius ** 2 - y ** 2) - radius", "z = sqrt(R^2 - y^2) - R def surface(y): return np.sqrt(radius ** 2", "inc_angle = ang * np.pi / 180 h = height(inc_angle) z_inc = np.array([-source_pos,", "# focal point for ang in np.linspace(-angle_d, angle_d, num_rays): inc_angle = ang *", "2) - radius # angle between the incident ray and the line connecting", "of the mirror plt.plot([-focal_length], [0], 'o') # focal point for ang in np.linspace(-angle_d,", "* radius if source_pos >= focal_length: z_0 = -z_0 if z_0 > 0", "mirror y = np.linspace(-radius, radius, 1000) # mirror equation z = sqrt(R^2 -", "of rays source_pos = 80 # source position in mm (must be positive)", "np.linspace(-angle_d, angle_d, num_rays): inc_angle = ang * np.pi / 180 h = height(inc_angle)", "plt.plot(z_ref, y_ref, 'r', lw=1) plt.title(\"Radius = {:.1f} mm. Focal length = {:.1f} mm.", "{:.1f} mm. Focal length = {:.1f} mm. Source position = {:.1f} mm.\\nMaximum incident", "height(inc_angle): phi = ref_angle(inc_angle) + epsilon(inc_angle) return radius * np.sin(phi) # line equation", "ray with the mirror def height(inc_angle): phi = ref_angle(inc_angle) + epsilon(inc_angle) return radius", "- z0) plt.figure(figsize=(13, 8)) plt.plot(surface(y), y) # mirror surface visualization plt.plot([-2 * radius,", "> 0 else z_0 else: z_0 = z_0 if z_0 > 0 else", "deg. Number of rays = {}\".format(radius, focal_length, -source_pos, angle_d, num_rays)) plt.xlabel(\"z, mm\") plt.ylabel(\"r,", "= np.array([h, 0]) plt.plot(z_ref, y_ref, 'r', lw=1) plt.title(\"Radius = {:.1f} mm. 
Focal length", "100 # curvature radius of the mirror in mm (must be positive) angle_d", "np.array([0, h]) plt.plot(z_inc, y_inc, 'k', lw=1) # draw incident beam z_0 = ref_z(inc_angle)", "< abs(2 * focal_length) and abs(source_pos) > abs(focal_length) and abs(z_0) > abs(2 *", "= np.array([surface(h), z_0]) y_ref = np.array([h, 0]) plt.plot(z_ref, y_ref, 'r', lw=1) plt.title(\"Radius =", "mm (must be positive) angle_d = 30 # maximum angle of incidence of", "# source position in mm (must be positive) focal_length = radius / 2", "radius - source_pos return np.arcsin(q / radius * np.sin(inc_angle)) # angle of reflected", "of incidence # of the ray on the mirror and the center of", "num_rays): inc_angle = ang * np.pi / 180 h = height(inc_angle) z_inc =", "incident ray and the line connecting the point of incidence # of the", "ref_z(inc_angle) if np.isnan(z_0): z_0 = -2 * radius if source_pos >= focal_length: z_0", "# of the ray on the mirror and the center of curvature of", "return np.arcsin(q / radius * np.sin(inc_angle)) # angle of reflected ray def ref_angle(inc_angle):", "be positive) focal_length = radius / 2 # focal length of the mirror", "# mirror surface visualization plt.plot([-2 * radius, 0], [0, 0]) # axis of", "point of incidence # of the ray on the mirror and the center", "> abs(2 * radius): z_ref = np.array([surface(h), z_0]) y_ref = np.array([h, 0]) plt.plot(z_ref,", "= np.array([0, h]) plt.plot(z_inc, y_inc, 'k', lw=1) # draw incident beam z_0 =", "{:.1f} mm.\\nMaximum incident angle = {:.1f} deg. Number of rays = {}\".format(radius, focal_length,", "{}\".format(radius, focal_length, -source_pos, angle_d, num_rays)) plt.xlabel(\"z, mm\") plt.ylabel(\"r, mm\") plt.ylim(-radius, radius) plt.xlim(-2 *", "np.array([h, line(ref_angle(inc_angle), -2 * radius, z_0)]) if abs(source_pos) < abs(2 * focal_length) and", "else: z_0 = z_0 if z_0 > 0 else -z_0 z_ref = np.array([surface(h),", "reflected ray def line(inc_angle, z, z0): return np.tan(inc_angle) * (z - z0) plt.figure(figsize=(13,", "# number of rays source_pos = 80 # source position in mm (must", "** 2) - radius # angle between the incident ray and the line", "-source_pos, angle_d, num_rays)) plt.xlabel(\"z, mm\") plt.ylabel(\"r, mm\") plt.ylim(-radius, radius) plt.xlim(-2 * radius, 0)", "(must be positive) focal_length = radius / 2 # focal length of the", "mirror def epsilon(inc_angle): q = radius - source_pos return np.arcsin(q / radius *", "- y^2) - R def surface(y): return np.sqrt(radius ** 2 - y **", "the incident ray with the mirror def height(inc_angle): phi = ref_angle(inc_angle) + epsilon(inc_angle)", "incidence # of the ray on the mirror and the center of curvature", "plt.plot(z_inc, y_inc, 'k', lw=1) # draw incident beam z_0 = ref_z(inc_angle) if np.isnan(z_0):", "of the intersection of the reflected ray with the axis def ref_z(inc_angle): q", "mm (must be positive) focal_length = radius / 2 # focal length of", "positive) angle_d = 30 # maximum angle of incidence of the incident beam", "radius / 2 # focal length of the mirror y = np.linspace(-radius, radius,", "the mirror def epsilon(inc_angle): q = radius - source_pos return np.arcsin(q / radius", "the point of incidence # of the ray on the mirror and the", "/ np.sin(ref_angle(inc_angle)) return radius - q # the y-coordinate of the intersection of", "focal_length, -source_pos, angle_d, num_rays)) plt.xlabel(\"z, mm\") plt.ylabel(\"r, mm\") plt.ylim(-radius, radius) plt.xlim(-2 * radius,", "rays source_pos = 80 # source position in mm (must be positive) focal_length", "# 
curvature radius of the mirror in mm (must be positive) angle_d =", "matplotlib.pyplot as plt import numpy as np from numpy.lib.function_base import angle radius =", "2 # focal length of the mirror y = np.linspace(-radius, radius, 1000) #", "y) # mirror surface visualization plt.plot([-2 * radius, 0], [0, 0]) # axis", "y-coordinate of the intersection of the incident ray with the mirror def height(inc_angle):", "= ref_z(inc_angle) if np.isnan(z_0): z_0 = -2 * radius if source_pos >= focal_length:", "angle between the incident ray and the line connecting the point of incidence", "= ref_angle(inc_angle) + epsilon(inc_angle) return radius * np.sin(phi) # line equation for extension", "-z_0 if z_0 > 0 else z_0 else: z_0 = z_0 if z_0", "- q # the y-coordinate of the intersection of the incident ray with", "z_0]) y_ref = np.array([h, 0]) plt.plot(z_ref, y_ref, 'r', lw=1) plt.title(\"Radius = {:.1f} mm.", "and the center of curvature of the mirror def epsilon(inc_angle): q = radius", "* epsilon(inc_angle) # the z-coordinate of the intersection of the reflected ray with", "plt.title(\"Radius = {:.1f} mm. Focal length = {:.1f} mm. Source position = {:.1f}", "return np.tan(inc_angle) * (z - z0) plt.figure(figsize=(13, 8)) plt.plot(surface(y), y) # mirror surface", "z0) plt.figure(figsize=(13, 8)) plt.plot(surface(y), y) # mirror surface visualization plt.plot([-2 * radius, 0],", "of incidence of the incident beam in degrees num_rays = 21 # number", "z_0 else: z_0 = z_0 if z_0 > 0 else -z_0 z_ref =", "the center of curvature of the mirror def epsilon(inc_angle): q = radius -", "np.array([surface(h), z_0]) y_ref = np.array([h, 0]) plt.plot(z_ref, y_ref, 'r', lw=1) plt.title(\"Radius = {:.1f}", "line(ref_angle(inc_angle), -2 * radius, z_0)]) if abs(source_pos) < abs(2 * focal_length) and abs(source_pos)", "= radius - source_pos return np.arcsin(q / radius * np.sin(inc_angle)) # angle of", "ref_z(inc_angle): q = radius * np.sin(-epsilon(inc_angle)) / np.sin(ref_angle(inc_angle)) return radius - q #", "- source_pos return np.arcsin(q / radius * np.sin(inc_angle)) # angle of reflected ray", "z-coordinate of the intersection of the reflected ray with the axis def ref_z(inc_angle):", "[0, 0]) # axis of the mirror plt.plot([-focal_length], [0], 'o') # focal point", "reflected ray with the axis def ref_z(inc_angle): q = radius * np.sin(-epsilon(inc_angle)) /", "source_pos = 80 # source position in mm (must be positive) focal_length =", "of rays = {}\".format(radius, focal_length, -source_pos, angle_d, num_rays)) plt.xlabel(\"z, mm\") plt.ylabel(\"r, mm\") plt.ylim(-radius,", "ref_angle(inc_angle): return inc_angle - 2 * epsilon(inc_angle) # the z-coordinate of the intersection", "q # the y-coordinate of the intersection of the incident ray with the", "30 # maximum angle of incidence of the incident beam in degrees num_rays", "y^2) - R def surface(y): return np.sqrt(radius ** 2 - y ** 2)", "-z_0 z_ref = np.array([surface(h), -2 * radius]) y_ref = np.array([h, line(ref_angle(inc_angle), -2 *", "= np.array([h, line(ref_angle(inc_angle), -2 * radius, z_0)]) if abs(source_pos) < abs(2 * focal_length)", "between the incident ray and the line connecting the point of incidence #", "* (z - z0) plt.figure(figsize=(13, 8)) plt.plot(surface(y), y) # mirror surface visualization plt.plot([-2", "0 else z_0 else: z_0 = z_0 if z_0 > 0 else -z_0", "-2 * radius]) y_ref = np.array([h, line(ref_angle(inc_angle), -2 * radius, z_0)]) if abs(source_pos)", "= z_0 if z_0 > 0 else -z_0 z_ref = np.array([surface(h), -2 *", "phi = 
ref_angle(inc_angle) + epsilon(inc_angle) return radius * np.sin(phi) # line equation for", "0 else -z_0 z_ref = np.array([surface(h), -2 * radius]) y_ref = np.array([h, line(ref_angle(inc_angle),", "sqrt(R^2 - y^2) - R def surface(y): return np.sqrt(radius ** 2 - y", "- radius # angle between the incident ray and the line connecting the", "z_ref = np.array([surface(h), z_0]) y_ref = np.array([h, 0]) plt.plot(z_ref, y_ref, 'r', lw=1) plt.title(\"Radius", "y_inc = np.array([0, h]) plt.plot(z_inc, y_inc, 'k', lw=1) # draw incident beam z_0", "- R def surface(y): return np.sqrt(radius ** 2 - y ** 2) -", "numpy as np from numpy.lib.function_base import angle radius = 100 # curvature radius", "surface visualization plt.plot([-2 * radius, 0], [0, 0]) # axis of the mirror", "the incident beam in degrees num_rays = 21 # number of rays source_pos", "return radius * np.sin(phi) # line equation for extension of the reflected ray", "= 100 # curvature radius of the mirror in mm (must be positive)", "length of the mirror y = np.linspace(-radius, radius, 1000) # mirror equation z", "ray def line(inc_angle, z, z0): return np.tan(inc_angle) * (z - z0) plt.figure(figsize=(13, 8))", "position = {:.1f} mm.\\nMaximum incident angle = {:.1f} deg. Number of rays =", "{:.1f} mm. Source position = {:.1f} mm.\\nMaximum incident angle = {:.1f} deg. Number", "of reflected ray def ref_angle(inc_angle): return inc_angle - 2 * epsilon(inc_angle) # the", "source_pos >= focal_length: z_0 = -z_0 if z_0 > 0 else z_0 else:", "epsilon(inc_angle) # the z-coordinate of the intersection of the reflected ray with the", "mirror and the center of curvature of the mirror def epsilon(inc_angle): q =", "+ epsilon(inc_angle) return radius * np.sin(phi) # line equation for extension of the", "of the intersection of the incident ray with the mirror def height(inc_angle): phi", "curvature radius of the mirror in mm (must be positive) angle_d = 30", "else -z_0 z_ref = np.array([surface(h), -2 * radius]) y_ref = np.array([h, line(ref_angle(inc_angle), -2", "(must be positive) angle_d = 30 # maximum angle of incidence of the", "beam z_0 = ref_z(inc_angle) if np.isnan(z_0): z_0 = -2 * radius if source_pos", "rays = {}\".format(radius, focal_length, -source_pos, angle_d, num_rays)) plt.xlabel(\"z, mm\") plt.ylabel(\"r, mm\") plt.ylim(-radius, radius)", "y = np.linspace(-radius, radius, 1000) # mirror equation z = sqrt(R^2 - y^2)", "if np.isnan(z_0): z_0 = -2 * radius if source_pos >= focal_length: z_0 =", "= 80 # source position in mm (must be positive) focal_length = radius", "degrees num_rays = 21 # number of rays source_pos = 80 # source", "for ang in np.linspace(-angle_d, angle_d, num_rays): inc_angle = ang * np.pi / 180", "radius * np.sin(inc_angle)) # angle of reflected ray def ref_angle(inc_angle): return inc_angle -", "def ref_angle(inc_angle): return inc_angle - 2 * epsilon(inc_angle) # the z-coordinate of the", "the intersection of the incident ray with the mirror def height(inc_angle): phi =", "import numpy as np from numpy.lib.function_base import angle radius = 100 # curvature", "ray with the axis def ref_z(inc_angle): q = radius * np.sin(-epsilon(inc_angle)) / np.sin(ref_angle(inc_angle))", "abs(source_pos) < abs(2 * focal_length) and abs(source_pos) > abs(focal_length) and abs(z_0) > abs(2", "= np.array([-source_pos, surface(h)]) y_inc = np.array([0, h]) plt.plot(z_inc, y_inc, 'k', lw=1) # draw", "angle of reflected ray def ref_angle(inc_angle): return inc_angle - 2 * epsilon(inc_angle) #", "mirror surface visualization 
plt.plot([-2 * radius, 0], [0, 0]) # axis of the", "8)) plt.plot(surface(y), y) # mirror surface visualization plt.plot([-2 * radius, 0], [0, 0])", "and the line connecting the point of incidence # of the ray on", "'o') # focal point for ang in np.linspace(-angle_d, angle_d, num_rays): inc_angle = ang", "abs(focal_length) and abs(z_0) > abs(2 * radius): z_ref = np.array([surface(h), z_0]) y_ref =", "for extension of the reflected ray def line(inc_angle, z, z0): return np.tan(inc_angle) *", "of the mirror y = np.linspace(-radius, radius, 1000) # mirror equation z =", "angle radius = 100 # curvature radius of the mirror in mm (must", "= np.linspace(-radius, radius, 1000) # mirror equation z = sqrt(R^2 - y^2) -", "line connecting the point of incidence # of the ray on the mirror", "def surface(y): return np.sqrt(radius ** 2 - y ** 2) - radius #", "np.sqrt(radius ** 2 - y ** 2) - radius # angle between the", "center of curvature of the mirror def epsilon(inc_angle): q = radius - source_pos", "numpy.lib.function_base import angle radius = 100 # curvature radius of the mirror in", "h]) plt.plot(z_inc, y_inc, 'k', lw=1) # draw incident beam z_0 = ref_z(inc_angle) if", "y_ref = np.array([h, line(ref_angle(inc_angle), -2 * radius, z_0)]) if abs(source_pos) < abs(2 *", "np.sin(phi) # line equation for extension of the reflected ray def line(inc_angle, z,", "* radius, 0], [0, 0]) # axis of the mirror plt.plot([-focal_length], [0], 'o')", "z_inc = np.array([-source_pos, surface(h)]) y_inc = np.array([0, h]) plt.plot(z_inc, y_inc, 'k', lw=1) #", "# the y-coordinate of the intersection of the incident ray with the mirror", "radius = 100 # curvature radius of the mirror in mm (must be", "focal point for ang in np.linspace(-angle_d, angle_d, num_rays): inc_angle = ang * np.pi", "radius * np.sin(-epsilon(inc_angle)) / np.sin(ref_angle(inc_angle)) return radius - q # the y-coordinate of", "* radius): z_ref = np.array([surface(h), z_0]) y_ref = np.array([h, 0]) plt.plot(z_ref, y_ref, 'r',", "def line(inc_angle, z, z0): return np.tan(inc_angle) * (z - z0) plt.figure(figsize=(13, 8)) plt.plot(surface(y),", "mirror def height(inc_angle): phi = ref_angle(inc_angle) + epsilon(inc_angle) return radius * np.sin(phi) #", "with the mirror def height(inc_angle): phi = ref_angle(inc_angle) + epsilon(inc_angle) return radius *", "= -2 * radius if source_pos >= focal_length: z_0 = -z_0 if z_0", "the mirror y = np.linspace(-radius, radius, 1000) # mirror equation z = sqrt(R^2", "of the mirror in mm (must be positive) angle_d = 30 # maximum", "angle of incidence of the incident beam in degrees num_rays = 21 #", "on the mirror and the center of curvature of the mirror def epsilon(inc_angle):", "focal_length) and abs(source_pos) > abs(focal_length) and abs(z_0) > abs(2 * radius): z_ref =", "the incident ray and the line connecting the point of incidence # of", "if z_0 > 0 else -z_0 z_ref = np.array([surface(h), -2 * radius]) y_ref", "the reflected ray def line(inc_angle, z, z0): return np.tan(inc_angle) * (z - z0)", "= {}\".format(radius, focal_length, -source_pos, angle_d, num_rays)) plt.xlabel(\"z, mm\") plt.ylabel(\"r, mm\") plt.ylim(-radius, radius) plt.xlim(-2", "# focal length of the mirror y = np.linspace(-radius, radius, 1000) # mirror", "the axis def ref_z(inc_angle): q = radius * np.sin(-epsilon(inc_angle)) / np.sin(ref_angle(inc_angle)) return radius", "0], [0, 0]) # axis of the mirror plt.plot([-focal_length], [0], 'o') # focal", "[0], 'o') # focal point for ang in np.linspace(-angle_d, angle_d, num_rays): 
inc_angle =", "ray and the line connecting the point of incidence # of the ray", "# draw incident beam z_0 = ref_z(inc_angle) if np.isnan(z_0): z_0 = -2 *", "/ 180 h = height(inc_angle) z_inc = np.array([-source_pos, surface(h)]) y_inc = np.array([0, h])", "as np from numpy.lib.function_base import angle radius = 100 # curvature radius of", "mm. Source position = {:.1f} mm.\\nMaximum incident angle = {:.1f} deg. Number of", "* radius, z_0)]) if abs(source_pos) < abs(2 * focal_length) and abs(source_pos) > abs(focal_length)", "/ radius * np.sin(inc_angle)) # angle of reflected ray def ref_angle(inc_angle): return inc_angle", "with the axis def ref_z(inc_angle): q = radius * np.sin(-epsilon(inc_angle)) / np.sin(ref_angle(inc_angle)) return", "= np.array([surface(h), -2 * radius]) y_ref = np.array([h, line(ref_angle(inc_angle), -2 * radius, z_0)])", "angle = {:.1f} deg. Number of rays = {}\".format(radius, focal_length, -source_pos, angle_d, num_rays))", ">= focal_length: z_0 = -z_0 if z_0 > 0 else z_0 else: z_0", "y ** 2) - radius # angle between the incident ray and the", "= -z_0 if z_0 > 0 else z_0 else: z_0 = z_0 if", "y_ref = np.array([h, 0]) plt.plot(z_ref, y_ref, 'r', lw=1) plt.title(\"Radius = {:.1f} mm. Focal", "num_rays = 21 # number of rays source_pos = 80 # source position", "abs(2 * radius): z_ref = np.array([surface(h), z_0]) y_ref = np.array([h, 0]) plt.plot(z_ref, y_ref,", "-2 * radius if source_pos >= focal_length: z_0 = -z_0 if z_0 >", "z_ref = np.array([surface(h), -2 * radius]) y_ref = np.array([h, line(ref_angle(inc_angle), -2 * radius,", "/ 2 # focal length of the mirror y = np.linspace(-radius, radius, 1000)", "intersection of the incident ray with the mirror def height(inc_angle): phi = ref_angle(inc_angle)", "* np.sin(-epsilon(inc_angle)) / np.sin(ref_angle(inc_angle)) return radius - q # the y-coordinate of the", "2 * epsilon(inc_angle) # the z-coordinate of the intersection of the reflected ray", "if source_pos >= focal_length: z_0 = -z_0 if z_0 > 0 else z_0", "incident angle = {:.1f} deg. Number of rays = {}\".format(radius, focal_length, -source_pos, angle_d,", "# angle of reflected ray def ref_angle(inc_angle): return inc_angle - 2 * epsilon(inc_angle)" ]
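As a quick numerical cross-check of the geometry plotted above, the axis crossing returned by ref_z() for a near-paraxial ray should approach the image distance given by the mirror equation 1/s + 1/s' = 2/R (with s = source_pos and R = radius), while wider rays cross slightly closer to the mirror, which is the spherical aberration visible in the fan of red rays. The lines below are only a sketch that reuses the functions and parameters defined above; the name paraxial_image and the chosen test angles are illustrative and not part of the original script.

# paraxial image distance from 1/s + 1/s' = 2/R; about 133.3 mm for R = 100 mm, s = 80 mm
paraxial_image = 1.0 / (2.0 / radius - 1.0 / source_pos)
for deg in (1.0, 10.0, 25.0):
    z_cross = ref_z(deg * np.pi / 180)
    print("incidence {:5.1f} deg -> axis crossing at {:7.2f} mm (paraxial: {:7.2f} mm)".format(deg, z_cross, paraxial_image))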
[ "content = \"((h_ ,'p')h p,'g()[]',:_')hpg;\" tok = NewickTokenizer(StringIO(content)) self.assertRaises(Exception, tok.tokens) def testBranchLen(self): exp", "',', 'p', ':', '1.1461E-5', ')', 'hp', ':', '1351.146436', ',', 'g', ')', 'hpg', ';']", "expected): self.assertEqual([i for i in NewickTokenizer(StringIO(content))], expected) def testOddQuotes(self): content = \"((h_ ,'p)h", "NewickEvents, NewickEventFactory from peyotl.utility.str_util import StringIO from peyotl.utility import get_logger import unittest from", "h , p[test] [test2]) hp, g) hpg ;' self._do_test(content, exp) def testQuoted(self): exp", "= ['(', '(', 'h', ':', '4.0', ',', 'p', ':', '1.1461E-5', ')', 'hp', ':',", "content = '((h,[pretest]p[test][posttest])hp,g)hpg;' exp = [{'type': NewickEvents.OPEN_SUBTREE, 'comments': []}, {'type': NewickEvents.OPEN_SUBTREE, 'comments': []},", "exp = ['(', '(', 'h ', ',', 'p', ')', 'h p', ',', \"g()[],':_\",", "_do_test(self, content, expected): self.assertEqual([i for i in NewickTokenizer(StringIO(content))], expected) def testOddQuotes(self): content =", "{'edge_info': None, 'type': NewickEvents.TIP, 'comments': [], 'label': 'g'}, {'edge_info': None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments':", "= '((h,p)hp,g)hpg;' self._do_test(content, exp) content = '((h,[pretest]p[test][posttest])hp,g)hpg;' exp = [{'type': NewickEvents.OPEN_SUBTREE, 'comments': []},", "get_logger import unittest from copy import deepcopy _LOG = get_logger(__name__) class TestNewickTokenizer(unittest.TestCase): def", "from peyotl.utility import get_logger import unittest from copy import deepcopy _LOG = get_logger(__name__)", "p,'g()[],'':_')hpg;\" self._do_test(content, exp) def _do_test(self, content, expected): self.assertEqual([i for i in NewickTokenizer(StringIO(content))], expected)", "'((h,p)hp,g)hpg;' self._do_test(content, exp) content = '((h,[pretest]p[test][posttest])hp,g)hpg;' exp = [{'type': NewickEvents.OPEN_SUBTREE, 'comments': []}, {'type':", "= \"((h_ ,'p')h p,'g()[],'':_')hpg;\" self._do_test(content, exp) content = \"(('h ',p)h p,'g()[],'':_')hpg;\" self._do_test(content, exp)", "class TestNewickEvents(unittest.TestCase): def testSimple(self): exp = [{'type': NewickEvents.OPEN_SUBTREE, 'comments': []}, {'type': NewickEvents.OPEN_SUBTREE, 'comments':", "{'type': NewickEvents.OPEN_SUBTREE, 'comments': []}, {'edge_info': None, 'type': NewickEvents.TIP, 'comments': [], 'label': 'h'}, {'edge_info':", "['(', '(', 'h', ':', '4.0', ',', 'p', ':', '1.1461E-5', ')', 'hp', ':', '1351.146436',", "'comments': [], 'label': 'hp'}, {'edge_info': None, 'type': NewickEvents.TIP, 'comments': [], 'label': 'g'}, {'edge_info':", "] content = '((h,p)hp,g)hpg;' self._do_test(content, exp) content = '((h,[pretest]p[test][posttest])hp,g)hpg;' exp = [{'type': NewickEvents.OPEN_SUBTREE,", "'type': NewickEvents.TIP, 'comments': [], 'label': 'h'}, {'edge_info': None, 'type': NewickEvents.TIP, 'comments': [], 'label':", "[], 'label': 'g'}, {'edge_info': None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hpg'} ] self._do_test(content,", "= \"((h_ ,'p)h p,g()[],:_)hpg;\" tok = NewickTokenizer(StringIO(content)) content = \"((h_ ,'p')h p,'g()[]',:_')hpg;\" tok", "' ( ( h , p[test] [test2]) hp, g) hpg ;' self._do_test(content, exp)", "{'edge_info': None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hpg'} ] self._do_test(content, exp) def _do_test(self,", "content = \"((h_ ,'p')h p,'g()[],'':_')hpg;\" self._do_test(content, exp) content = \"(('h ',p)h p,'g()[],'':_')hpg;\" 
self._do_test(content,", "testOddQuotes(self): content = \"((h_ ,'p)h p,g()[],:_)hpg;\" tok = NewickTokenizer(StringIO(content)) content = \"((h_ ,'p')h", "exp) content = ' ( ( h , p[test] [test2]) hp, g) hpg", "( ( h , p[test] [test2]) hp, g) hpg ;' self._do_test(content, exp) def", "'p', ')', 'h p', ',', \"g()[],':_\", ')', 'hpg', ';'] content = \"((h_ ,'p')h", "i in NewickTokenizer(StringIO(content))], expected) def testOddQuotes(self): content = \"((h_ ,'p)h p,g()[],:_)hpg;\" tok =", "'type': NewickEvents.TIP, 'comments': [], 'label': 'h'}, {'edge_info': None, 'type': NewickEvents.TIP, 'comments': ['pretest', 'test',", "',', 'p', ')', 'h p', ',', \"g()[],':_\", ')', 'hpg', ';'] content = \"((h_", "self._do_test(content, exp) def testQuoted(self): exp = ['(', '(', 'h ', ',', 'p', ')',", "content, expected): self.assertEqual([i for i in NewickTokenizer(StringIO(content))], expected) def testOddQuotes(self): content = \"((h_", "'1.1461E-5', ')', 'hp', ':', '1351.146436', ',', 'g', ')', 'hpg', ';'] content = '((h:4.0,p:1.1461E-5)hp:1351.146436,g)hpg;'", "'g', ')', 'hpg', ';'] content = '((h:4.0,p:1.1461E-5)hp:1351.146436,g)hpg;' self._do_test(content, exp) class TestNewickEvents(unittest.TestCase): def testSimple(self):", "None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hp'}, {'edge_info': None, 'type': NewickEvents.TIP, 'comments': [],", "content, expected): e = [deepcopy(i) for i in NewickEventFactory(tokenizer=NewickTokenizer(stream=StringIO(content)))] # print(e) self.assertEqual(e, expected)", "= '((h,[pretest]p[test][posttest])hp,g)hpg;' exp = [{'type': NewickEvents.OPEN_SUBTREE, 'comments': []}, {'type': NewickEvents.OPEN_SUBTREE, 'comments': []}, {'edge_info':", ";' self._do_test(content, exp) def testQuoted(self): exp = ['(', '(', 'h ', ',', 'p',", "def testQuoted(self): exp = ['(', '(', 'h ', ',', 'p', ')', 'h p',", "self._do_test(content, exp) def _do_test(self, content, expected): e = [deepcopy(i) for i in NewickEventFactory(tokenizer=NewickTokenizer(stream=StringIO(content)))]", "_LOG = get_logger(__name__) class TestNewickTokenizer(unittest.TestCase): def testSimple(self): exp = ['(', '(', 'h', ',',", "{'edge_info': None, 'type': NewickEvents.TIP, 'comments': [], 'label': 'h'}, {'edge_info': None, 'type': NewickEvents.TIP, 'comments':", "'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hpg'} ] self._do_test(content, exp) def _do_test(self, content, expected):", "exp) content = \"(('h ',p)h p,'g()[],'':_')hpg;\" self._do_test(content, exp) def _do_test(self, content, expected): self.assertEqual([i", "p', ',', \"g()[],':_\", ')', 'hpg', ';'] content = \"((h_ ,'p')h p,'g()[],'':_')hpg;\" self._do_test(content, exp)", "')', 'hp', ':', '1351.146436', ',', 'g', ')', 'hpg', ';'] content = '((h:4.0,p:1.1461E-5)hp:1351.146436,g)hpg;' self._do_test(content,", "[{'type': NewickEvents.OPEN_SUBTREE, 'comments': []}, {'type': NewickEvents.OPEN_SUBTREE, 'comments': []}, {'edge_info': None, 'type': NewickEvents.TIP, 'comments':", "NewickTokenizer(StringIO(content))], expected) def testOddQuotes(self): content = \"((h_ ,'p)h p,g()[],:_)hpg;\" tok = NewickTokenizer(StringIO(content)) content", "'h ', ',', 'p', ')', 'h p', ',', \"g()[],':_\", ')', 'hpg', ';'] content", "None, 'type': NewickEvents.TIP, 'comments': ['pretest', 'test', 'posttest'], 'label': 'p'}, {'edge_info': None, 'type': NewickEvents.CLOSE_SUBTREE,", "':', '4.0', ',', 'p', ':', '1.1461E-5', ')', 'hp', ':', '1351.146436', ',', 'g', ')',", "for i in 
NewickEventFactory(tokenizer=NewickTokenizer(stream=StringIO(content)))] # print(e) self.assertEqual(e, expected) if __name__ == \"__main__\": unittest.main()", "import NewickTokenizer, NewickEvents, NewickEventFactory from peyotl.utility.str_util import StringIO from peyotl.utility import get_logger import", "import StringIO from peyotl.utility import get_logger import unittest from copy import deepcopy _LOG", "['(', '(', 'h ', ',', 'p', ')', 'h p', ',', \"g()[],':_\", ')', 'hpg',", "def testSimple(self): exp = [{'type': NewickEvents.OPEN_SUBTREE, 'comments': []}, {'type': NewickEvents.OPEN_SUBTREE, 'comments': []}, {'edge_info':", "NewickEvents.TIP, 'comments': [], 'label': 'h'}, {'edge_info': None, 'type': NewickEvents.TIP, 'comments': [], 'label': 'p'},", "'comments': [], 'label': 'hpg'} ] self._do_test(content, exp) def _do_test(self, content, expected): e =", "exp) def testQuoted(self): exp = ['(', '(', 'h ', ',', 'p', ')', 'h", "':', '1351.146436', ',', 'g', ')', 'hpg', ';'] content = '((h:4.0,p:1.1461E-5)hp:1351.146436,g)hpg;' self._do_test(content, exp) class", "\"((h_ ,'p')h p,'g()[]',:_')hpg;\" tok = NewickTokenizer(StringIO(content)) self.assertRaises(Exception, tok.tokens) def testBranchLen(self): exp = ['(',", "hp, g) hpg ;' self._do_test(content, exp) def testQuoted(self): exp = ['(', '(', 'h", "'((h:4.0,p:1.1461E-5)hp:1351.146436,g)hpg;' self._do_test(content, exp) class TestNewickEvents(unittest.TestCase): def testSimple(self): exp = [{'type': NewickEvents.OPEN_SUBTREE, 'comments': []},", "\"((h_ ,'p')h p,'g()[],'':_')hpg;\" self._do_test(content, exp) content = \"(('h ',p)h p,'g()[],'':_')hpg;\" self._do_test(content, exp) def", "[]}, {'type': NewickEvents.OPEN_SUBTREE, 'comments': []}, {'edge_info': None, 'type': NewickEvents.TIP, 'comments': [], 'label': 'h'},", "StringIO from peyotl.utility import get_logger import unittest from copy import deepcopy _LOG =", "content = '((h,p[test])hp,g)hpg;' self._do_test(content, exp) content = ' ( ( h , p[test]", "[], 'label': 'g'}, {'edge_info': None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hpg'} ] content", "')', 'h p', ',', \"g()[],':_\", ')', 'hpg', ';'] content = \"((h_ ,'p')h p,'g()[],'':_')hpg;\"", "'comments': [], 'label': 'g'}, {'edge_info': None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hpg'} ]", "hpg ;' self._do_test(content, exp) def testQuoted(self): exp = ['(', '(', 'h ', ',',", "')', 'hp', ',', 'g', ')', 'hpg', ';'] content = '((h,p)hp,g)hpg;' self._do_test(content, exp) content", "exp) content = '((h,[pretest]p[test][posttest])hp,g)hpg;' exp = [{'type': NewickEvents.OPEN_SUBTREE, 'comments': []}, {'type': NewickEvents.OPEN_SUBTREE, 'comments':", "NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hpg'} ] self._do_test(content, exp) def _do_test(self, content, expected): e", "= \"((h_ ,'p')h p,'g()[]',:_')hpg;\" tok = NewickTokenizer(StringIO(content)) self.assertRaises(Exception, tok.tokens) def testBranchLen(self): exp =", "None, 'type': NewickEvents.TIP, 'comments': [], 'label': 'h'}, {'edge_info': None, 'type': NewickEvents.TIP, 'comments': ['pretest',", "'g', ')', 'hpg', ';'] content = '((h,p)hp,g)hpg;' self._do_test(content, exp) content = '((h,p[test])hp,g)hpg;' self._do_test(content,", "exp = ['(', '(', 'h', ':', '4.0', ',', 'p', ':', '1.1461E-5', ')', 'hp',", "content = '((h,p)hp,g)hpg;' self._do_test(content, exp) content = '((h,[pretest]p[test][posttest])hp,g)hpg;' exp = [{'type': NewickEvents.OPEN_SUBTREE, 'comments':", "NewickEventFactory from 
peyotl.utility.str_util import StringIO from peyotl.utility import get_logger import unittest from copy", ",'p)h p,g()[],:_)hpg;\" tok = NewickTokenizer(StringIO(content)) content = \"((h_ ,'p')h p,'g()[]',:_')hpg;\" tok = NewickTokenizer(StringIO(content))", "[], 'label': 'h'}, {'edge_info': None, 'type': NewickEvents.TIP, 'comments': [], 'label': 'p'}, {'edge_info': None,", "'((h,[pretest]p[test][posttest])hp,g)hpg;' exp = [{'type': NewickEvents.OPEN_SUBTREE, 'comments': []}, {'type': NewickEvents.OPEN_SUBTREE, 'comments': []}, {'edge_info': None,", "'type': NewickEvents.TIP, 'comments': [], 'label': 'g'}, {'edge_info': None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label':", "copy import deepcopy _LOG = get_logger(__name__) class TestNewickTokenizer(unittest.TestCase): def testSimple(self): exp = ['(',", "'label': 'p'}, {'edge_info': None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hp'}, {'edge_info': None, 'type':", "self._do_test(content, exp) content = '((h,p[test])hp,g)hpg;' self._do_test(content, exp) content = ' ( ( h", "'p'}, {'edge_info': None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hp'}, {'edge_info': None, 'type': NewickEvents.TIP,", "python from peyotl.utility.tokenizer import NewickTokenizer, NewickEvents, NewickEventFactory from peyotl.utility.str_util import StringIO from peyotl.utility", "self._do_test(content, exp) def _do_test(self, content, expected): self.assertEqual([i for i in NewickTokenizer(StringIO(content))], expected) def", "def _do_test(self, content, expected): self.assertEqual([i for i in NewickTokenizer(StringIO(content))], expected) def testOddQuotes(self): content", "'label': 'g'}, {'edge_info': None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hpg'} ] self._do_test(content, exp)", "TestNewickTokenizer(unittest.TestCase): def testSimple(self): exp = ['(', '(', 'h', ',', 'p', ')', 'hp', ',',", "import unittest from copy import deepcopy _LOG = get_logger(__name__) class TestNewickTokenizer(unittest.TestCase): def testSimple(self):", "def testOddQuotes(self): content = \"((h_ ,'p)h p,g()[],:_)hpg;\" tok = NewickTokenizer(StringIO(content)) content = \"((h_", "')', 'hpg', ';'] content = '((h:4.0,p:1.1461E-5)hp:1351.146436,g)hpg;' self._do_test(content, exp) class TestNewickEvents(unittest.TestCase): def testSimple(self): exp", "g) hpg ;' self._do_test(content, exp) def testQuoted(self): exp = ['(', '(', 'h ',", "tok = NewickTokenizer(StringIO(content)) self.assertRaises(Exception, tok.tokens) def testBranchLen(self): exp = ['(', '(', 'h', ':',", "'label': 'g'}, {'edge_info': None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hpg'} ] content =", ", p[test] [test2]) hp, g) hpg ;' self._do_test(content, exp) def testQuoted(self): exp =", "'hp', ':', '1351.146436', ',', 'g', ')', 'hpg', ';'] content = '((h:4.0,p:1.1461E-5)hp:1351.146436,g)hpg;' self._do_test(content, exp)", "= ['(', '(', 'h', ',', 'p', ')', 'hp', ',', 'g', ')', 'hpg', ';']", "testSimple(self): exp = ['(', '(', 'h', ',', 'p', ')', 'hp', ',', 'g', ')',", "NewickEvents.TIP, 'comments': [], 'label': 'p'}, {'edge_info': None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hp'},", "NewickEvents.OPEN_SUBTREE, 'comments': []}, {'edge_info': None, 'type': NewickEvents.TIP, 'comments': [], 'label': 'h'}, {'edge_info': None,", "'comments': []}, {'edge_info': None, 'type': NewickEvents.TIP, 'comments': [], 'label': 'h'}, {'edge_info': None, 'type':", "content = \"(('h ',p)h p,'g()[],'':_')hpg;\" 
self._do_test(content, exp) def _do_test(self, content, expected): self.assertEqual([i for", "'test', 'posttest'], 'label': 'p'}, {'edge_info': None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hp'}, {'edge_info':", "'comments': [], 'label': 'hpg'} ] content = '((h,p)hp,g)hpg;' self._do_test(content, exp) content = '((h,[pretest]p[test][posttest])hp,g)hpg;'", "'hpg'} ] self._do_test(content, exp) def _do_test(self, content, expected): e = [deepcopy(i) for i", "/usr/bin/env python from peyotl.utility.tokenizer import NewickTokenizer, NewickEvents, NewickEventFactory from peyotl.utility.str_util import StringIO from", "in NewickTokenizer(StringIO(content))], expected) def testOddQuotes(self): content = \"((h_ ,'p)h p,g()[],:_)hpg;\" tok = NewickTokenizer(StringIO(content))", "testBranchLen(self): exp = ['(', '(', 'h', ':', '4.0', ',', 'p', ':', '1.1461E-5', ')',", "NewickEvents.TIP, 'comments': [], 'label': 'h'}, {'edge_info': None, 'type': NewickEvents.TIP, 'comments': ['pretest', 'test', 'posttest'],", "None, 'type': NewickEvents.TIP, 'comments': [], 'label': 'h'}, {'edge_info': None, 'type': NewickEvents.TIP, 'comments': [],", "exp = [{'type': NewickEvents.OPEN_SUBTREE, 'comments': []}, {'type': NewickEvents.OPEN_SUBTREE, 'comments': []}, {'edge_info': None, 'type':", "exp = ['(', '(', 'h', ',', 'p', ')', 'hp', ',', 'g', ')', 'hpg',", "self._do_test(content, exp) content = \"(('h ',p)h p,'g()[],'':_')hpg;\" self._do_test(content, exp) def _do_test(self, content, expected):", "'hpg'} ] content = '((h,p)hp,g)hpg;' self._do_test(content, exp) content = '((h,[pretest]p[test][posttest])hp,g)hpg;' exp = [{'type':", "',', \"g()[],':_\", ')', 'hpg', ';'] content = \"((h_ ,'p')h p,'g()[],'':_')hpg;\" self._do_test(content, exp) content", "{'edge_info': None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hp'}, {'edge_info': None, 'type': NewickEvents.TIP, 'comments':", "'((h,p[test])hp,g)hpg;' self._do_test(content, exp) content = ' ( ( h , p[test] [test2]) hp,", "content = '((h,p)hp,g)hpg;' self._do_test(content, exp) content = '((h,p[test])hp,g)hpg;' self._do_test(content, exp) content = '", "[test2]) hp, g) hpg ;' self._do_test(content, exp) def testQuoted(self): exp = ['(', '(',", "content = '((h:4.0,p:1.1461E-5)hp:1351.146436,g)hpg;' self._do_test(content, exp) class TestNewickEvents(unittest.TestCase): def testSimple(self): exp = [{'type': NewickEvents.OPEN_SUBTREE,", "= [{'type': NewickEvents.OPEN_SUBTREE, 'comments': []}, {'type': NewickEvents.OPEN_SUBTREE, 'comments': []}, {'edge_info': None, 'type': NewickEvents.TIP,", "['pretest', 'test', 'posttest'], 'label': 'p'}, {'edge_info': None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hp'},", "get_logger(__name__) class TestNewickTokenizer(unittest.TestCase): def testSimple(self): exp = ['(', '(', 'h', ',', 'p', ')',", "NewickEvents.OPEN_SUBTREE, 'comments': []}, {'type': NewickEvents.OPEN_SUBTREE, 'comments': []}, {'edge_info': None, 'type': NewickEvents.TIP, 'comments': [],", "exp) def _do_test(self, content, expected): self.assertEqual([i for i in NewickTokenizer(StringIO(content))], expected) def testOddQuotes(self):", "for i in NewickTokenizer(StringIO(content))], expected) def testOddQuotes(self): content = \"((h_ ,'p)h p,g()[],:_)hpg;\" tok", "] self._do_test(content, exp) def _do_test(self, content, expected): e = [deepcopy(i) for i in", "',p)h p,'g()[],'':_')hpg;\" self._do_test(content, exp) def _do_test(self, content, expected): self.assertEqual([i for i in 
NewickTokenizer(StringIO(content))],", "')', 'hpg', ';'] content = '((h,p)hp,g)hpg;' self._do_test(content, exp) content = '((h,p[test])hp,g)hpg;' self._do_test(content, exp)", "def testBranchLen(self): exp = ['(', '(', 'h', ':', '4.0', ',', 'p', ':', '1.1461E-5',", "import get_logger import unittest from copy import deepcopy _LOG = get_logger(__name__) class TestNewickTokenizer(unittest.TestCase):", "class TestNewickTokenizer(unittest.TestCase): def testSimple(self): exp = ['(', '(', 'h', ',', 'p', ')', 'hp',", "'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hpg'} ] content = '((h,p)hp,g)hpg;' self._do_test(content, exp) content", "'h'}, {'edge_info': None, 'type': NewickEvents.TIP, 'comments': ['pretest', 'test', 'posttest'], 'label': 'p'}, {'edge_info': None,", "\"g()[],':_\", ')', 'hpg', ';'] content = \"((h_ ,'p')h p,'g()[],'':_')hpg;\" self._do_test(content, exp) content =", "NewickTokenizer(StringIO(content)) self.assertRaises(Exception, tok.tokens) def testBranchLen(self): exp = ['(', '(', 'h', ':', '4.0', ',',", "'comments': []}, {'type': NewickEvents.OPEN_SUBTREE, 'comments': []}, {'edge_info': None, 'type': NewickEvents.TIP, 'comments': [], 'label':", "'posttest'], 'label': 'p'}, {'edge_info': None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hp'}, {'edge_info': None,", "'hpg', ';'] content = '((h:4.0,p:1.1461E-5)hp:1351.146436,g)hpg;' self._do_test(content, exp) class TestNewickEvents(unittest.TestCase): def testSimple(self): exp =", "'comments': [], 'label': 'h'}, {'edge_info': None, 'type': NewickEvents.TIP, 'comments': [], 'label': 'p'}, {'edge_info':", "'g'}, {'edge_info': None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hpg'} ] content = '((h,p)hp,g)hpg;'", "[], 'label': 'hpg'} ] content = '((h,p)hp,g)hpg;' self._do_test(content, exp) content = '((h,[pretest]p[test][posttest])hp,g)hpg;' exp", "#! 
/usr/bin/env python from peyotl.utility.tokenizer import NewickTokenizer, NewickEvents, NewickEventFactory from peyotl.utility.str_util import StringIO", "unittest from copy import deepcopy _LOG = get_logger(__name__) class TestNewickTokenizer(unittest.TestCase): def testSimple(self): exp", "peyotl.utility import get_logger import unittest from copy import deepcopy _LOG = get_logger(__name__) class", "expected): e = [deepcopy(i) for i in NewickEventFactory(tokenizer=NewickTokenizer(stream=StringIO(content)))] # print(e) self.assertEqual(e, expected) if", "'(', 'h', ':', '4.0', ',', 'p', ':', '1.1461E-5', ')', 'hp', ':', '1351.146436', ',',", "'4.0', ',', 'p', ':', '1.1461E-5', ')', 'hp', ':', '1351.146436', ',', 'g', ')', 'hpg',", "None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hpg'} ] self._do_test(content, exp) def _do_test(self, content,", "= [deepcopy(i) for i in NewickEventFactory(tokenizer=NewickTokenizer(stream=StringIO(content)))] # print(e) self.assertEqual(e, expected) if __name__ ==", "( h , p[test] [test2]) hp, g) hpg ;' self._do_test(content, exp) def testQuoted(self):", "self._do_test(content, exp) content = ' ( ( h , p[test] [test2]) hp, g)", "'h', ':', '4.0', ',', 'p', ':', '1.1461E-5', ')', 'hp', ':', '1351.146436', ',', 'g',", "'label': 'hpg'} ] content = '((h,p)hp,g)hpg;' self._do_test(content, exp) content = '((h,[pretest]p[test][posttest])hp,g)hpg;' exp =", "'((h,p)hp,g)hpg;' self._do_test(content, exp) content = '((h,p[test])hp,g)hpg;' self._do_test(content, exp) content = ' ( (", "', ',', 'p', ')', 'h p', ',', \"g()[],':_\", ')', 'hpg', ';'] content =", "p[test] [test2]) hp, g) hpg ;' self._do_test(content, exp) def testQuoted(self): exp = ['(',", "from peyotl.utility.str_util import StringIO from peyotl.utility import get_logger import unittest from copy import", "NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hp'}, {'edge_info': None, 'type': NewickEvents.TIP, 'comments': [], 'label': 'g'},", "',', 'g', ')', 'hpg', ';'] content = '((h:4.0,p:1.1461E-5)hp:1351.146436,g)hpg;' self._do_test(content, exp) class TestNewickEvents(unittest.TestCase): def", "'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hp'}, {'edge_info': None, 'type': NewickEvents.TIP, 'comments': [], 'label':", "expected) def testOddQuotes(self): content = \"((h_ ,'p)h p,g()[],:_)hpg;\" tok = NewickTokenizer(StringIO(content)) content =", "= get_logger(__name__) class TestNewickTokenizer(unittest.TestCase): def testSimple(self): exp = ['(', '(', 'h', ',', 'p',", "'hp', ',', 'g', ')', 'hpg', ';'] content = '((h,p)hp,g)hpg;' self._do_test(content, exp) content =", "'hpg', ';'] content = \"((h_ ,'p')h p,'g()[],'':_')hpg;\" self._do_test(content, exp) content = \"(('h ',p)h", "self.assertRaises(Exception, tok.tokens) def testBranchLen(self): exp = ['(', '(', 'h', ':', '4.0', ',', 'p',", "def testSimple(self): exp = ['(', '(', 'h', ',', 'p', ')', 'hp', ',', 'g',", "exp) class TestNewickEvents(unittest.TestCase): def testSimple(self): exp = [{'type': NewickEvents.OPEN_SUBTREE, 'comments': []}, {'type': NewickEvents.OPEN_SUBTREE,", "'type': NewickEvents.TIP, 'comments': ['pretest', 'test', 'posttest'], 'label': 'p'}, {'edge_info': None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments':", "NewickEvents.TIP, 'comments': [], 'label': 'g'}, {'edge_info': None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hpg'}", "\"((h_ ,'p)h p,g()[],:_)hpg;\" tok = NewickTokenizer(StringIO(content)) content = \"((h_ ,'p')h p,'g()[]',:_')hpg;\" tok =", "'g'}, {'edge_info': 
<filename>peyotl/test/test_tokenizer.py
#! /usr/bin/env python
from peyotl.utility.tokenizer import NewickTokenizer, NewickEvents, NewickEventFactory
from peyotl.utility.str_util import StringIO
from peyotl.utility import get_logger
import unittest
from copy import deepcopy

_LOG = get_logger(__name__)


class TestNewickTokenizer(unittest.TestCase):
    def testSimple(self):
        exp = ['(', '(', 'h', ',', 'p', ')', 'hp', ',', 'g', ')', 'hpg', ';']
        content = '((h,p)hp,g)hpg;'
        self._do_test(content, exp)
        content = '((h,p[test])hp,g)hpg;'
        self._do_test(content, exp)
        content = ' ( ( h , p[test] [test2]) hp, g) hpg ;'
        self._do_test(content, exp)

    def testQuoted(self):
        exp = ['(', '(', 'h ', ',', 'p', ')', 'h p', ',', "g()[],':_", ')', 'hpg', ';']
        content = "((h_ ,'p')h p,'g()[],'':_')hpg;"
        self._do_test(content, exp)
        content = "(('h ',p)h p,'g()[],'':_')hpg;"
        self._do_test(content, exp)

    def _do_test(self, content, expected):
        self.assertEqual([i for i in NewickTokenizer(StringIO(content))], expected)

    def testOddQuotes(self):
        content = "((h_ ,'p)h p,g()[],:_)hpg;"
        tok = NewickTokenizer(StringIO(content))
        content = "((h_ ,'p')h p,'g()[]',:_')hpg;"
        tok = NewickTokenizer(StringIO(content))
        self.assertRaises(Exception, tok.tokens)

    def testBranchLen(self):
        exp = ['(', '(', 'h', ':', '4.0', ',', 'p', ':', '1.1461E-5', ')', 'hp',
               ':', '1351.146436', ',', 'g', ')', 'hpg', ';']
        content = '((h:4.0,p:1.1461E-5)hp:1351.146436,g)hpg;'
        self._do_test(content, exp)


class TestNewickEvents(unittest.TestCase):
    def testSimple(self):
        exp = [{'type': NewickEvents.OPEN_SUBTREE, 'comments': []},
               {'type': NewickEvents.OPEN_SUBTREE, 'comments': []},
               {'edge_info': None, 'type': NewickEvents.TIP, 'comments': [], 'label': 'h'},
               {'edge_info': None, 'type': NewickEvents.TIP, 'comments': [], 'label': 'p'},
               {'edge_info': None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hp'},
               {'edge_info': None, 'type': NewickEvents.TIP, 'comments': [], 'label': 'g'},
               {'edge_info': None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hpg'}
               ]
        content = '((h,p)hp,g)hpg;'
        self._do_test(content, exp)
        content = '((h,[pretest]p[test][posttest])hp,g)hpg;'
        exp = [{'type': NewickEvents.OPEN_SUBTREE, 'comments': []},
               {'type': NewickEvents.OPEN_SUBTREE, 'comments': []},
               {'edge_info': None, 'type': NewickEvents.TIP, 'comments': [], 'label': 'h'},
               {'edge_info': None, 'type': NewickEvents.TIP, 'comments': ['pretest', 'test', 'posttest'], 'label': 'p'},
               {'edge_info': None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hp'},
               {'edge_info': None, 'type': NewickEvents.TIP, 'comments': [], 'label': 'g'},
               {'edge_info': None, 'type': NewickEvents.CLOSE_SUBTREE, 'comments': [], 'label': 'hpg'}
               ]
        self._do_test(content, exp)

    def _do_test(self, content, expected):
        e = [deepcopy(i) for i in NewickEventFactory(tokenizer=NewickTokenizer(stream=StringIO(content)))]
        # print(e)
        self.assertEqual(e, expected)


if __name__ == "__main__":
    unittest.main()
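For orientation, a minimal usage sketch of the two entry points these tests exercise; it only restates what testSimple above already asserts and assumes nothing beyond the imports at the top of the file.

# Usage sketch based solely on the assertions in the tests above.
from copy import deepcopy

from peyotl.utility.tokenizer import NewickTokenizer, NewickEventFactory
from peyotl.utility.str_util import StringIO

newick = '((h,p)hp,g)hpg;'
# Iterating a NewickTokenizer yields the raw Newick token strings:
tokens = [t for t in NewickTokenizer(StringIO(newick))]
# -> ['(', '(', 'h', ',', 'p', ')', 'hp', ',', 'g', ')', 'hpg', ';']
# NewickEventFactory turns that token stream into OPEN_SUBTREE / TIP / CLOSE_SUBTREE
# event dicts; the events are copied before use, mirroring _do_test above, since the
# factory may reuse its event objects.
events = [deepcopy(e) for e in NewickEventFactory(tokenizer=NewickTokenizer(stream=StringIO(newick)))]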
""" Module including utilities for main algorithms"""
from PIL import Image as PillowImage
from collections import namedtuple

ImageData = namedtuple("ImgData", 'header image')
HSV = namedtuple("HSV", 'h s v')
RGB = namedtuple("RGB", 'r g b')


class Image:
    """ Wrapper for Image class for easier usage"""

    def __init__(self, image_path: str):
        self.image_path = image_path
        self.image: PillowImage = PillowImage.open(self.image_path)
        self.pixels = self.image.load()

    def get_size(self):
        """ :return: x, y in pixels """
        return self.image.size[0], self.image.size[1]


def create_empty_image(width: int, height: int) -> PillowImage:
    return PillowImage.new("RGB", (width, height), "#000000")


def get_greyscale(red: int, green: int, blue: int) -> float:
    return 0.2126 * red + 0.587 * green + 0.114 * blue


def rgb_to_hsv(red: int, green: int, blue: int) -> namedtuple:
    _red = red / 255
    _green = green / 255
    _blue = blue / 255
    c_max = max(_red, _green, _blue)
    c_min = min(_red, _green, _blue)
    delta = c_max - c_min
    if delta > 0:
        if c_max == _red:
            h = 60 * (((_green - _blue) / delta) % 6)
        elif c_max == _green:
            h = 60 * (((_blue - _red) / delta) + 2)
        elif c_max == _blue:
            h = 60 * (((_red - _green) / delta) + 4)
        else:
            raise ValueError(f"c_max ({c_max} is not equal {_red}/{_green}/{_blue})")
    else:
        h = 0
    s = 0 if c_max == 0 else delta/c_max
    return HSV(h, s, c_max)


def hsv_to_rgb(h: float, s: float, v: float) -> namedtuple:
    c = v * s
    x = c * (1 - abs((h/60) % 2 - 1))
    m = v - c
    if 0 <= h < 60:
        red, green, blue = c, x, 0
    elif 60 <= h < 120:
        red, green, blue = x, c, 0
    elif 120 <= h < 180:
        red, green, blue = 0, c, x
    elif 180 <= h < 240:
        red, green, blue = 0, x, c
    elif 240 <= h < 300:
        red, green, blue = x, 0, c
    elif 300 <= h < 360:
        red, green, blue = c, 0, x
    else:
        raise ValueError(f"h value: {h} is out of range (0, 360)")
    return RGB(
        int((red + m) * 255),
        int((green + m) * 255),
        int((blue + m) * 255)
    )
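A quick sanity check of the two converters just defined; this check is added here (not part of the original module) but it only uses the functions exactly as written above.

# Worked example for the converters above (follows directly from the formulas):
#   rgb_to_hsv(255, 0, 0): _red=1.0, _green=_blue=0.0, so c_max=1.0 and delta=1.0;
#   h = 60 * (((0.0 - 0.0) / 1.0) % 6) = 0.0, s = 1.0, v = c_max = 1.0.
#   hsv_to_rgb(0.0, 1.0, 1.0): c=1.0, x=0.0, m=0.0, and 0 <= h < 60 -> (c, x, 0),
#   which maps back to RGB(255, 0, 0), so the round trip recovers pure red.
assert rgb_to_hsv(255, 0, 0) == (0.0, 1.0, 1.0)
assert hsv_to_rgb(0.0, 1.0, 1.0) == (255, 0, 0)
# Greyscale weight of pure red is 0.2126 * 255 = 54.213.
assert abs(get_greyscale(255, 0, 0) - 54.213) < 1e-6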
<filename>d18/tokenizer.py
import re
from typing import Optional

from common.utils import safe_get
from .token import Op, Operations, Token


class Tokenizer:
    @staticmethod
    def simple(line: str) -> Token:
        return Simple(line).token

    @staticmethod
    def advanced(line: str) -> Token:
        return Advanced(line).token

    @staticmethod
    def sum(tokens: list[Token]) -> int:
        return sum(t.get_value() for t in tokens)


class TokenException(Exception):
    pass


class Simple:
    pattern = re.compile(r"\s+|\d+|[+*]|\(|\)")

    def __init__(self, line: str):
        self.bits: list[str] = [
            p for p in Simple.pattern.findall(line.strip()) if p.strip() != ""
        ]
        if not self.bits:
            raise TokenException("Got nothing to parse")
        token, pos = self.parse_expression(0)
        if pos != len(self.bits):
            raise TokenException("Could not parse line")
        self.token = token

    def parse_item(self, pos: int) -> tuple[Token, int]:
        if self.bits[pos] == "(":
            item, pos = self.parse_expression(pos + 1)
            if safe_get(self.bits, pos, "") != ")":
                raise TokenException("Bracket not closed correctly")
        else:
            item = Token.from_int(self.bits[pos])
        return item, pos + 1

    def parse_expression(self, pos: int) -> tuple[Token, int]:
        token: Optional[Token] = None
        op: Optional[Op] = None
        while True:
            item, pos = self.parse_item(pos)
            token = Token.create(token, op, item)
            if (s := safe_get(self.bits, pos, "")) not in Operations:
                return token, pos
            op = s  # type: ignore
            pos += 1


class Advanced(Simple):
    def parse_sub_expression(self, pos: int) -> tuple[Token, int]:
        token: Optional[Token] = None
        while True:
            item, pos = self.parse_item(pos)
            token = Token.create(token, '+', item)
            if safe_get(self.bits, pos, "") != "+":
                return token, pos
            pos += 1

    def parse_expression(self, pos: int) -> tuple[Token, int]:
        token: Optional[Token] = None
        while True:
            item, pos = self.parse_sub_expression(pos)
            token = Token.create(token, '*', item)
            if safe_get(self.bits, pos, "") != "*":
                return token, pos
            pos += 1
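The parser above relies on two things that are not included here: the Token/Op/Operations definitions in .token and the safe_get helper from common.utils. Below is a minimal sketch of what safe_get would have to look like for the bounds-checked look-aheads above to work; it is a hypothetical stand-in, not the project's actual implementation.

from typing import Sequence, TypeVar

T = TypeVar("T")


def safe_get(items: Sequence[T], index: int, default: T) -> T:
    """Return items[index], or `default` when the index is out of range."""
    # Hypothetical stand-in for common.utils.safe_get as used by Simple/Advanced above.
    return items[index] if 0 <= index < len(items) else default

With that contract, parse_expression can peek one position past the end of self.bits without raising IndexError, which is how the loop detects that the line has been fully consumed.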
"""
Tests for the loading of surface maps for the GPROF-NN data processing.
"""
from datetime import datetime

import pytest
import numpy as np

from gprof_nn.data.surface import (read_land_mask,
                                   read_autosnow,
                                   read_emissivity_classes)
from gprof_nn.data.preprocessor import has_preprocessor

HAS_PREPROCESSOR = has_preprocessor()


@pytest.mark.skipif(not HAS_PREPROCESSOR, reason="Preprocessor missing.")
def test_read_land_mask():
    """
    Test reading of land mask.
    """
    mask = read_land_mask("GMI")
    assert mask.mask.shape == (180 * 32, 360 * 32)
    mask = read_land_mask("MHS")
    assert mask.mask.shape == (180 * 16, 360 * 16)

    # Ensure point in North Atlantic is classified as Ocean.
    m = mask.interp({"longitude": -46.0, "latitude": 35.0})
    assert np.isclose(m.mask.data, 0)

    # Ensure point in Africa is classified as land.
    m = mask.interp({"longitude": 0.0, "latitude": 20.0})
    assert np.all(m.mask.data > 0)


@pytest.mark.skipif(not HAS_PREPROCESSOR, reason="Preprocessor missing.")
def test_read_autosnow():
    """
    Test reading of autosnow files.
    """
    autosnow = read_autosnow("2021-01-01T00:00:00")
    # Ensure no snow around equator
    autosnow_eq = autosnow.interp({"latitude": 0.0, "longitude": 0.0}, "nearest")
    assert np.all(autosnow_eq.snow.data == 0)


@pytest.mark.skipif(not HAS_PREPROCESSOR, reason="Preprocessor missing.")
def test_read_emissivity_classes():
    """
    Test reading of emissivity classes.
    """
    data = read_emissivity_classes()

    # Ensure point in North Atlantic is classified as Ocean.
    data_i = data.interp({"longitude": -46.0, "latitude": 35.0})
    assert np.all(np.isclose(data_i.emissivity.data, 0))

    # Ensure point in Africa is classified as land.
    data_i = data.interp({"longitude": 0.0, "latitude": 20.0})
    assert np.all(data_i.emissivity.data > 0)
data_i = data.interp({\"longitude\": -46.0, \"latitude\":", "reason=\"Preprocessor missing.\") def test_read_emissivity_classes(): \"\"\" Test reading of emissivity classes. \"\"\" data =", "data = read_emissivity_classes() # Ensure point in North Atlantic is classified as Ocean.", "classified as Ocean. data_i = data.interp({\"longitude\": -46.0, \"latitude\": 35.0}) assert np.all(np.isclose(data_i.emissivity.data, 0)) #", "Ensure no snow around equator autosnow_eq = autosnow.interp({\"latitude\": 0.0, \"longitude\": 0.0}, \"nearest\") assert", "in Africa is classified as land. data_i = data.interp({\"longitude\": 0.0, \"latitude\": 20.0}) assert", "reason=\"Preprocessor missing.\") def test_read_land_mask(): \"\"\" Test reading of land mask. \"\"\" mask =", "== (180 * 32, 360 * 32) mask = read_land_mask(\"MHS\") assert mask.mask.shape ==", "import pytest import numpy as np from gprof_nn.data.surface import (read_land_mask, read_autosnow, read_emissivity_classes) from", "\"\"\" Test reading of emissivity classes. \"\"\" data = read_emissivity_classes() # Ensure point", "emissivity classes. \"\"\" data = read_emissivity_classes() # Ensure point in North Atlantic is", "\"\"\" data = read_emissivity_classes() # Ensure point in North Atlantic is classified as", "= mask.interp({\"longitude\": -46.0, \"latitude\": 35.0}) assert np.isclose(m.mask.data, 0) # Ensure point in Africa", "mask.mask.shape == (180 * 16, 360 * 16) # Ensure point in North", "in North Atlantic is classified as Ocean. m = mask.interp({\"longitude\": -46.0, \"latitude\": 35.0})", "no snow around equator autosnow_eq = autosnow.interp({\"latitude\": 0.0, \"longitude\": 0.0}, \"nearest\") assert np.all(autosnow_eq.snow.data", "from gprof_nn.data.preprocessor import has_preprocessor HAS_PREPROCESSOR = has_preprocessor() @pytest.mark.skipif(not HAS_PREPROCESSOR, reason=\"Preprocessor missing.\") def test_read_land_mask():", "# Ensure point in Africa is classified as land. m = mask.interp({\"longitude\": 0.0,", "reading of land mask. \"\"\" mask = read_land_mask(\"GMI\") assert mask.mask.shape == (180 *", "point in Africa is classified as land. m = mask.interp({\"longitude\": 0.0, \"latitude\": 20.0})", "missing.\") def test_read_autosnow(): \"\"\" Test reading of autosnow files. \"\"\" autosnow = read_autosnow(\"2021-01-01T00:00:00\")", "missing.\") def test_read_emissivity_classes(): \"\"\" Test reading of emissivity classes. \"\"\" data = read_emissivity_classes()", "== 0) @pytest.mark.skipif(not HAS_PREPROCESSOR, reason=\"Preprocessor missing.\") def test_read_emissivity_classes(): \"\"\" Test reading of emissivity", "in North Atlantic is classified as Ocean. data_i = data.interp({\"longitude\": -46.0, \"latitude\": 35.0})", "\"latitude\": 35.0}) assert np.all(np.isclose(data_i.emissivity.data, 0)) # Ensure point in Africa is classified as", "is classified as land. m = mask.interp({\"longitude\": 0.0, \"latitude\": 20.0}) assert np.all(m.mask.data >", "autosnow = read_autosnow(\"2021-01-01T00:00:00\") # Ensure no snow around equator autosnow_eq = autosnow.interp({\"latitude\": 0.0,", "\"latitude\": 20.0}) assert np.all(m.mask.data > 0) @pytest.mark.skipif(not HAS_PREPROCESSOR, reason=\"Preprocessor missing.\") def test_read_autosnow(): \"\"\"", "mask.interp({\"longitude\": 0.0, \"latitude\": 20.0}) assert np.all(m.mask.data > 0) @pytest.mark.skipif(not HAS_PREPROCESSOR, reason=\"Preprocessor missing.\") def", "read_land_mask(\"MHS\") assert mask.mask.shape == (180 * 16, 360 * 16) # Ensure point", "the GPROF-NN data processing. 
\"\"\" from datetime import datetime import pytest import numpy", "np.all(np.isclose(data_i.emissivity.data, 0)) # Ensure point in Africa is classified as land. data_i =", "autosnow.interp({\"latitude\": 0.0, \"longitude\": 0.0}, \"nearest\") assert np.all(autosnow_eq.snow.data == 0) @pytest.mark.skipif(not HAS_PREPROCESSOR, reason=\"Preprocessor missing.\")", "import datetime import pytest import numpy as np from gprof_nn.data.surface import (read_land_mask, read_autosnow,", "reason=\"Preprocessor missing.\") def test_read_autosnow(): \"\"\" Test reading of autosnow files. \"\"\" autosnow =", "classified as land. m = mask.interp({\"longitude\": 0.0, \"latitude\": 20.0}) assert np.all(m.mask.data > 0)", "= read_emissivity_classes() # Ensure point in North Atlantic is classified as Ocean. data_i", "Ensure point in Africa is classified as land. m = mask.interp({\"longitude\": 0.0, \"latitude\":", "numpy as np from gprof_nn.data.surface import (read_land_mask, read_autosnow, read_emissivity_classes) from gprof_nn.data.preprocessor import has_preprocessor", "HAS_PREPROCESSOR = has_preprocessor() @pytest.mark.skipif(not HAS_PREPROCESSOR, reason=\"Preprocessor missing.\") def test_read_land_mask(): \"\"\" Test reading of", "mask. \"\"\" mask = read_land_mask(\"GMI\") assert mask.mask.shape == (180 * 32, 360 *", "Ensure point in North Atlantic is classified as Ocean. data_i = data.interp({\"longitude\": -46.0,", "of land mask. \"\"\" mask = read_land_mask(\"GMI\") assert mask.mask.shape == (180 * 32,", "test_read_autosnow(): \"\"\" Test reading of autosnow files. \"\"\" autosnow = read_autosnow(\"2021-01-01T00:00:00\") # Ensure", "360 * 16) # Ensure point in North Atlantic is classified as Ocean.", "# Ensure point in Africa is classified as land. data_i = data.interp({\"longitude\": 0.0,", "for the loading of surface maps for the GPROF-NN data processing. \"\"\" from", "(180 * 32, 360 * 32) mask = read_land_mask(\"MHS\") assert mask.mask.shape == (180", "\"\"\" Test reading of autosnow files. \"\"\" autosnow = read_autosnow(\"2021-01-01T00:00:00\") # Ensure no", "for the GPROF-NN data processing. \"\"\" from datetime import datetime import pytest import", "as np from gprof_nn.data.surface import (read_land_mask, read_autosnow, read_emissivity_classes) from gprof_nn.data.preprocessor import has_preprocessor HAS_PREPROCESSOR", "read_land_mask(\"GMI\") assert mask.mask.shape == (180 * 32, 360 * 32) mask = read_land_mask(\"MHS\")", "data_i = data.interp({\"longitude\": -46.0, \"latitude\": 35.0}) assert np.all(np.isclose(data_i.emissivity.data, 0)) # Ensure point in", "Ocean. m = mask.interp({\"longitude\": -46.0, \"latitude\": 35.0}) assert np.isclose(m.mask.data, 0) # Ensure point", "North Atlantic is classified as Ocean. m = mask.interp({\"longitude\": -46.0, \"latitude\": 35.0}) assert", "35.0}) assert np.all(np.isclose(data_i.emissivity.data, 0)) # Ensure point in Africa is classified as land.", "mask.mask.shape == (180 * 32, 360 * 32) mask = read_land_mask(\"MHS\") assert mask.mask.shape", "the loading of surface maps for the GPROF-NN data processing. \"\"\" from datetime", "= read_land_mask(\"MHS\") assert mask.mask.shape == (180 * 16, 360 * 16) # Ensure", "of autosnow files. 
\"\"\" autosnow = read_autosnow(\"2021-01-01T00:00:00\") # Ensure no snow around equator", "(180 * 16, 360 * 16) # Ensure point in North Atlantic is", "= read_autosnow(\"2021-01-01T00:00:00\") # Ensure no snow around equator autosnow_eq = autosnow.interp({\"latitude\": 0.0, \"longitude\":", "test_read_land_mask(): \"\"\" Test reading of land mask. \"\"\" mask = read_land_mask(\"GMI\") assert mask.mask.shape", "= autosnow.interp({\"latitude\": 0.0, \"longitude\": 0.0}, \"nearest\") assert np.all(autosnow_eq.snow.data == 0) @pytest.mark.skipif(not HAS_PREPROCESSOR, reason=\"Preprocessor", "equator autosnow_eq = autosnow.interp({\"latitude\": 0.0, \"longitude\": 0.0}, \"nearest\") assert np.all(autosnow_eq.snow.data == 0) @pytest.mark.skipif(not", "np.all(autosnow_eq.snow.data == 0) @pytest.mark.skipif(not HAS_PREPROCESSOR, reason=\"Preprocessor missing.\") def test_read_emissivity_classes(): \"\"\" Test reading of", "data.interp({\"longitude\": -46.0, \"latitude\": 35.0}) assert np.all(np.isclose(data_i.emissivity.data, 0)) # Ensure point in Africa is", "is classified as Ocean. m = mask.interp({\"longitude\": -46.0, \"latitude\": 35.0}) assert np.isclose(m.mask.data, 0)", "mask = read_land_mask(\"MHS\") assert mask.mask.shape == (180 * 16, 360 * 16) #", "point in Africa is classified as land. data_i = data.interp({\"longitude\": 0.0, \"latitude\": 20.0})", "0) # Ensure point in Africa is classified as land. m = mask.interp({\"longitude\":", "\"longitude\": 0.0}, \"nearest\") assert np.all(autosnow_eq.snow.data == 0) @pytest.mark.skipif(not HAS_PREPROCESSOR, reason=\"Preprocessor missing.\") def test_read_emissivity_classes():", "assert np.all(autosnow_eq.snow.data == 0) @pytest.mark.skipif(not HAS_PREPROCESSOR, reason=\"Preprocessor missing.\") def test_read_emissivity_classes(): \"\"\" Test reading", "Ocean. data_i = data.interp({\"longitude\": -46.0, \"latitude\": 35.0}) assert np.all(np.isclose(data_i.emissivity.data, 0)) # Ensure point", "reading of emissivity classes. \"\"\" data = read_emissivity_classes() # Ensure point in North", "@pytest.mark.skipif(not HAS_PREPROCESSOR, reason=\"Preprocessor missing.\") def test_read_land_mask(): \"\"\" Test reading of land mask. \"\"\"" ]
[ "value).strip().lower() value = re.sub('[-\\s]+', '_', value) return value def main(): VERSION = 0.1", "put the split files into. Defaults to infile without the .yml.\") parser.add_argument(\"--version\", action='store_true')", "os.mkdir(outdir) tools = a['tools'] categories = defaultdict(list) for tool in tools: categories[tool['tool_panel_section_label']].append(tool) for", "%s\" % good_fname) with open(good_fname, 'w') as outfile: yaml.dump(tool_yaml, outfile, default_flow_style=False) return if", "cat in categories: fname = str(cat) good_fname = outdir + \"/\" + slugify(fname)", "'', value).strip().lower() value = re.sub('[-\\s]+', '_', value) return value def main(): VERSION =", "open(good_fname, 'w') as outfile: yaml.dump(tool_yaml, outfile, default_flow_style=False) return if __name__ == \"__main__\": main()", "argparse.ArgumentParser(description=\"Splits up a Ephemeris `get_tool_list` yml file for a Galaxy server into individual", "tools: categories[tool['tool_panel_section_label']].append(tool) for cat in categories: fname = str(cat) good_fname = outdir +", "outdir = args.outdir if args.verbose: print('Outdir: %s' % outdir) if not os.path.isdir(outdir): os.mkdir(outdir)", "re.sub('\\.yml', '', filename) if args.outdir: outdir = args.outdir if args.verbose: print('Outdir: %s' %", "= 0.1 parser = argparse.ArgumentParser(description=\"Splits up a Ephemeris `get_tool_list` yml file for a", "import defaultdict import re import os import argparse def slugify(value): \"\"\" Normalizes string,", "non-alpha characters, and converts spaces to hyphens. \"\"\" value = re.sub('[^\\w\\s-]', '', value).strip().lower()", "and converts spaces to hyphens. \"\"\" value = re.sub('[^\\w\\s-]', '', value).strip().lower() value =", "= re.sub('[-\\s]+', '_', value) return value def main(): VERSION = 0.1 parser =", "spaces to hyphens. \"\"\" value = re.sub('[^\\w\\s-]', '', value).strip().lower() value = re.sub('[-\\s]+', '_',", "Label.\") parser.add_argument(\"-i\", \"--infile\", help=\"The returned `get_tool_list` yml file to split.\") parser.add_argument(\"-o\", \"--outdir\", help=\"The", "filename = args.infile a = yaml.safe_load(open(filename, 'r'), ) outdir = re.sub('\\.yml', '', filename)", "help=\"The output directory to put the split files into. Defaults to infile without", "args.verbose: print('Outdir: %s' % outdir) if not os.path.isdir(outdir): os.mkdir(outdir) tools = a['tools'] categories", "print('Outdir: %s' % outdir) if not os.path.isdir(outdir): os.mkdir(outdir) tools = a['tools'] categories =", "tools = a['tools'] categories = defaultdict(list) for tool in tools: categories[tool['tool_panel_section_label']].append(tool) for cat", "= a['tools'] categories = defaultdict(list) for tool in tools: categories[tool['tool_panel_section_label']].append(tool) for cat in", "tool in tools: categories[tool['tool_panel_section_label']].append(tool) for cat in categories: fname = str(cat) good_fname =", "\"\"\" value = re.sub('[^\\w\\s-]', '', value).strip().lower() value = re.sub('[-\\s]+', '_', value) return value", "value) return value def main(): VERSION = 0.1 parser = argparse.ArgumentParser(description=\"Splits up a", "hyphens. 
\"\"\" value = re.sub('[^\\w\\s-]', '', value).strip().lower() value = re.sub('[-\\s]+', '_', value) return", "value = re.sub('[^\\w\\s-]', '', value).strip().lower() value = re.sub('[-\\s]+', '_', value) return value def", "into individual files for each Section Label.\") parser.add_argument(\"-i\", \"--infile\", help=\"The returned `get_tool_list` yml", "% outdir) if not os.path.isdir(outdir): os.mkdir(outdir) tools = a['tools'] categories = defaultdict(list) for", "action='store_true') args = parser.parse_args() if args.version: print(\"split_tool_yml.py version: %.1f\" % VERSION) return filename", "files into. Defaults to infile without the .yml.\") parser.add_argument(\"--version\", action='store_true') parser.add_argument(\"--verbose\", action='store_true') args", "% good_fname) with open(good_fname, 'w') as outfile: yaml.dump(tool_yaml, outfile, default_flow_style=False) return if __name__", "on: %s\" % good_fname) with open(good_fname, 'w') as outfile: yaml.dump(tool_yaml, outfile, default_flow_style=False) return", "to put the split files into. Defaults to infile without the .yml.\") parser.add_argument(\"--version\",", "defaultdict import re import os import argparse def slugify(value): \"\"\" Normalizes string, converts", "'r'), ) outdir = re.sub('\\.yml', '', filename) if args.outdir: outdir = args.outdir if", "good_fname) with open(good_fname, 'w') as outfile: yaml.dump(tool_yaml, outfile, default_flow_style=False) return if __name__ ==", "lowercase, removes non-alpha characters, and converts spaces to hyphens. \"\"\" value = re.sub('[^\\w\\s-]',", "value def main(): VERSION = 0.1 parser = argparse.ArgumentParser(description=\"Splits up a Ephemeris `get_tool_list`", "if args.version: print(\"split_tool_yml.py version: %.1f\" % VERSION) return filename = args.infile a =", "not os.path.isdir(outdir): os.mkdir(outdir) tools = a['tools'] categories = defaultdict(list) for tool in tools:", "python import yaml from collections import defaultdict import re import os import argparse", "string, converts to lowercase, removes non-alpha characters, and converts spaces to hyphens. \"\"\"", "a Galaxy server into individual files for each Section Label.\") parser.add_argument(\"-i\", \"--infile\", help=\"The", "to lowercase, removes non-alpha characters, and converts spaces to hyphens. \"\"\" value =", "collections import defaultdict import re import os import argparse def slugify(value): \"\"\" Normalizes", "to infile without the .yml.\") parser.add_argument(\"--version\", action='store_true') parser.add_argument(\"--verbose\", action='store_true') args = parser.parse_args() if", "a = yaml.safe_load(open(filename, 'r'), ) outdir = re.sub('\\.yml', '', filename) if args.outdir: outdir", "re.sub('[-\\s]+', '_', value) return value def main(): VERSION = 0.1 parser = argparse.ArgumentParser(description=\"Splits", "Galaxy server into individual files for each Section Label.\") parser.add_argument(\"-i\", \"--infile\", help=\"The returned", "directory to put the split files into. 
Defaults to infile without the .yml.\")", "a['tools'] categories = defaultdict(list) for tool in tools: categories[tool['tool_panel_section_label']].append(tool) for cat in categories:", "parser.parse_args() if args.version: print(\"split_tool_yml.py version: %.1f\" % VERSION) return filename = args.infile a", ") outdir = re.sub('\\.yml', '', filename) if args.outdir: outdir = args.outdir if args.verbose:", "in categories: fname = str(cat) good_fname = outdir + \"/\" + slugify(fname) +", "x: x['name'] + x['owner'])} if args.verbose: print(\"Working on: %s\" % good_fname) with open(good_fname,", "= re.sub('[^\\w\\s-]', '', value).strip().lower() value = re.sub('[-\\s]+', '_', value) return value def main():", "'', filename) if args.outdir: outdir = args.outdir if args.verbose: print('Outdir: %s' % outdir)", "categories: fname = str(cat) good_fname = outdir + \"/\" + slugify(fname) + \".yml\"", "yml file to split.\") parser.add_argument(\"-o\", \"--outdir\", help=\"The output directory to put the split", "characters, and converts spaces to hyphens. \"\"\" value = re.sub('[^\\w\\s-]', '', value).strip().lower() value", "argparse def slugify(value): \"\"\" Normalizes string, converts to lowercase, removes non-alpha characters, and", "= argparse.ArgumentParser(description=\"Splits up a Ephemeris `get_tool_list` yml file for a Galaxy server into", "#!/usr/bin/env python import yaml from collections import defaultdict import re import os import", "outdir) if not os.path.isdir(outdir): os.mkdir(outdir) tools = a['tools'] categories = defaultdict(list) for tool", "individual files for each Section Label.\") parser.add_argument(\"-i\", \"--infile\", help=\"The returned `get_tool_list` yml file", "os.path.isdir(outdir): os.mkdir(outdir) tools = a['tools'] categories = defaultdict(list) for tool in tools: categories[tool['tool_panel_section_label']].append(tool)", "import os import argparse def slugify(value): \"\"\" Normalizes string, converts to lowercase, removes", "action='store_true') parser.add_argument(\"--verbose\", action='store_true') args = parser.parse_args() if args.version: print(\"split_tool_yml.py version: %.1f\" % VERSION)", "parser = argparse.ArgumentParser(description=\"Splits up a Ephemeris `get_tool_list` yml file for a Galaxy server", "parser.add_argument(\"-i\", \"--infile\", help=\"The returned `get_tool_list` yml file to split.\") parser.add_argument(\"-o\", \"--outdir\", help=\"The output", "the .yml.\") parser.add_argument(\"--version\", action='store_true') parser.add_argument(\"--verbose\", action='store_true') args = parser.parse_args() if args.version: print(\"split_tool_yml.py version:", "= args.outdir if args.verbose: print('Outdir: %s' % outdir) if not os.path.isdir(outdir): os.mkdir(outdir) tools", "x['name'] + x['owner'])} if args.verbose: print(\"Working on: %s\" % good_fname) with open(good_fname, 'w')", "converts spaces to hyphens. \"\"\" value = re.sub('[^\\w\\s-]', '', value).strip().lower() value = re.sub('[-\\s]+',", "to hyphens. \"\"\" value = re.sub('[^\\w\\s-]', '', value).strip().lower() value = re.sub('[-\\s]+', '_', value)", "into. 
Defaults to infile without the .yml.\") parser.add_argument(\"--version\", action='store_true') parser.add_argument(\"--verbose\", action='store_true') args =", "args.verbose: print(\"Working on: %s\" % good_fname) with open(good_fname, 'w') as outfile: yaml.dump(tool_yaml, outfile,", "{'tools': sorted(categories[cat], key=lambda x: x['name'] + x['owner'])} if args.verbose: print(\"Working on: %s\" %", "slugify(value): \"\"\" Normalizes string, converts to lowercase, removes non-alpha characters, and converts spaces", "infile without the .yml.\") parser.add_argument(\"--version\", action='store_true') parser.add_argument(\"--verbose\", action='store_true') args = parser.parse_args() if args.version:", "fname = str(cat) good_fname = outdir + \"/\" + slugify(fname) + \".yml\" tool_yaml", "file to split.\") parser.add_argument(\"-o\", \"--outdir\", help=\"The output directory to put the split files", "a Ephemeris `get_tool_list` yml file for a Galaxy server into individual files for", "`get_tool_list` yml file to split.\") parser.add_argument(\"-o\", \"--outdir\", help=\"The output directory to put the", "\"--infile\", help=\"The returned `get_tool_list` yml file to split.\") parser.add_argument(\"-o\", \"--outdir\", help=\"The output directory", "def main(): VERSION = 0.1 parser = argparse.ArgumentParser(description=\"Splits up a Ephemeris `get_tool_list` yml", "+ slugify(fname) + \".yml\" tool_yaml = {'tools': sorted(categories[cat], key=lambda x: x['name'] + x['owner'])}", "VERSION = 0.1 parser = argparse.ArgumentParser(description=\"Splits up a Ephemeris `get_tool_list` yml file for", "up a Ephemeris `get_tool_list` yml file for a Galaxy server into individual files", "args.infile a = yaml.safe_load(open(filename, 'r'), ) outdir = re.sub('\\.yml', '', filename) if args.outdir:", "def slugify(value): \"\"\" Normalizes string, converts to lowercase, removes non-alpha characters, and converts", "print(\"Working on: %s\" % good_fname) with open(good_fname, 'w') as outfile: yaml.dump(tool_yaml, outfile, default_flow_style=False)", "args.outdir if args.verbose: print('Outdir: %s' % outdir) if not os.path.isdir(outdir): os.mkdir(outdir) tools =", "return filename = args.infile a = yaml.safe_load(open(filename, 'r'), ) outdir = re.sub('\\.yml', '',", "for cat in categories: fname = str(cat) good_fname = outdir + \"/\" +", "if args.verbose: print(\"Working on: %s\" % good_fname) with open(good_fname, 'w') as outfile: yaml.dump(tool_yaml,", ".yml.\") parser.add_argument(\"--version\", action='store_true') parser.add_argument(\"--verbose\", action='store_true') args = parser.parse_args() if args.version: print(\"split_tool_yml.py version: %.1f\"", "args = parser.parse_args() if args.version: print(\"split_tool_yml.py version: %.1f\" % VERSION) return filename =", "with open(good_fname, 'w') as outfile: yaml.dump(tool_yaml, outfile, default_flow_style=False) return if __name__ == \"__main__\":", "filename) if args.outdir: outdir = args.outdir if args.verbose: print('Outdir: %s' % outdir) if", "if not os.path.isdir(outdir): os.mkdir(outdir) tools = a['tools'] categories = defaultdict(list) for tool in", "Ephemeris `get_tool_list` yml file for a Galaxy server into individual files for each", "version: %.1f\" % VERSION) return filename = args.infile a = yaml.safe_load(open(filename, 'r'), )", "Defaults to infile without the .yml.\") parser.add_argument(\"--version\", action='store_true') parser.add_argument(\"--verbose\", action='store_true') args = parser.parse_args()", "good_fname = outdir + \"/\" + 
slugify(fname) + \".yml\" tool_yaml = {'tools': sorted(categories[cat],", "import argparse def slugify(value): \"\"\" Normalizes string, converts to lowercase, removes non-alpha characters,", "yml file for a Galaxy server into individual files for each Section Label.\")", "if args.verbose: print('Outdir: %s' % outdir) if not os.path.isdir(outdir): os.mkdir(outdir) tools = a['tools']", "= parser.parse_args() if args.version: print(\"split_tool_yml.py version: %.1f\" % VERSION) return filename = args.infile", "from collections import defaultdict import re import os import argparse def slugify(value): \"\"\"", "for a Galaxy server into individual files for each Section Label.\") parser.add_argument(\"-i\", \"--infile\",", "parser.add_argument(\"--verbose\", action='store_true') args = parser.parse_args() if args.version: print(\"split_tool_yml.py version: %.1f\" % VERSION) return", "help=\"The returned `get_tool_list` yml file to split.\") parser.add_argument(\"-o\", \"--outdir\", help=\"The output directory to", "VERSION) return filename = args.infile a = yaml.safe_load(open(filename, 'r'), ) outdir = re.sub('\\.yml',", "= re.sub('\\.yml', '', filename) if args.outdir: outdir = args.outdir if args.verbose: print('Outdir: %s'", "yaml from collections import defaultdict import re import os import argparse def slugify(value):", "parser.add_argument(\"--version\", action='store_true') parser.add_argument(\"--verbose\", action='store_true') args = parser.parse_args() if args.version: print(\"split_tool_yml.py version: %.1f\" %", "args.version: print(\"split_tool_yml.py version: %.1f\" % VERSION) return filename = args.infile a = yaml.safe_load(open(filename,", "args.outdir: outdir = args.outdir if args.verbose: print('Outdir: %s' % outdir) if not os.path.isdir(outdir):", "% VERSION) return filename = args.infile a = yaml.safe_load(open(filename, 'r'), ) outdir =", "import re import os import argparse def slugify(value): \"\"\" Normalizes string, converts to", "`get_tool_list` yml file for a Galaxy server into individual files for each Section", "categories[tool['tool_panel_section_label']].append(tool) for cat in categories: fname = str(cat) good_fname = outdir + \"/\"", "+ x['owner'])} if args.verbose: print(\"Working on: %s\" % good_fname) with open(good_fname, 'w') as", "0.1 parser = argparse.ArgumentParser(description=\"Splits up a Ephemeris `get_tool_list` yml file for a Galaxy", "for tool in tools: categories[tool['tool_panel_section_label']].append(tool) for cat in categories: fname = str(cat) good_fname", "re.sub('[^\\w\\s-]', '', value).strip().lower() value = re.sub('[-\\s]+', '_', value) return value def main(): VERSION", "\"/\" + slugify(fname) + \".yml\" tool_yaml = {'tools': sorted(categories[cat], key=lambda x: x['name'] +", "= args.infile a = yaml.safe_load(open(filename, 'r'), ) outdir = re.sub('\\.yml', '', filename) if", "+ \".yml\" tool_yaml = {'tools': sorted(categories[cat], key=lambda x: x['name'] + x['owner'])} if args.verbose:", "yaml.safe_load(open(filename, 'r'), ) outdir = re.sub('\\.yml', '', filename) if args.outdir: outdir = args.outdir", "return value def main(): VERSION = 0.1 parser = argparse.ArgumentParser(description=\"Splits up a Ephemeris", "= yaml.safe_load(open(filename, 'r'), ) outdir = re.sub('\\.yml', '', filename) if args.outdir: outdir =", "for each Section Label.\") parser.add_argument(\"-i\", \"--infile\", help=\"The returned `get_tool_list` yml file to split.\")", "returned `get_tool_list` yml file to split.\") parser.add_argument(\"-o\", 
\"--outdir\", help=\"The output directory to put", "import yaml from collections import defaultdict import re import os import argparse def", "slugify(fname) + \".yml\" tool_yaml = {'tools': sorted(categories[cat], key=lambda x: x['name'] + x['owner'])} if", "'_', value) return value def main(): VERSION = 0.1 parser = argparse.ArgumentParser(description=\"Splits up", "= {'tools': sorted(categories[cat], key=lambda x: x['name'] + x['owner'])} if args.verbose: print(\"Working on: %s\"", "removes non-alpha characters, and converts spaces to hyphens. \"\"\" value = re.sub('[^\\w\\s-]', '',", "without the .yml.\") parser.add_argument(\"--version\", action='store_true') parser.add_argument(\"--verbose\", action='store_true') args = parser.parse_args() if args.version: print(\"split_tool_yml.py", "str(cat) good_fname = outdir + \"/\" + slugify(fname) + \".yml\" tool_yaml = {'tools':", "\".yml\" tool_yaml = {'tools': sorted(categories[cat], key=lambda x: x['name'] + x['owner'])} if args.verbose: print(\"Working", "tool_yaml = {'tools': sorted(categories[cat], key=lambda x: x['name'] + x['owner'])} if args.verbose: print(\"Working on:", "sorted(categories[cat], key=lambda x: x['name'] + x['owner'])} if args.verbose: print(\"Working on: %s\" % good_fname)", "main(): VERSION = 0.1 parser = argparse.ArgumentParser(description=\"Splits up a Ephemeris `get_tool_list` yml file", "os import argparse def slugify(value): \"\"\" Normalizes string, converts to lowercase, removes non-alpha", "= str(cat) good_fname = outdir + \"/\" + slugify(fname) + \".yml\" tool_yaml =", "+ \"/\" + slugify(fname) + \".yml\" tool_yaml = {'tools': sorted(categories[cat], key=lambda x: x['name']", "split.\") parser.add_argument(\"-o\", \"--outdir\", help=\"The output directory to put the split files into. Defaults", "categories = defaultdict(list) for tool in tools: categories[tool['tool_panel_section_label']].append(tool) for cat in categories: fname", "x['owner'])} if args.verbose: print(\"Working on: %s\" % good_fname) with open(good_fname, 'w') as outfile:", "%s' % outdir) if not os.path.isdir(outdir): os.mkdir(outdir) tools = a['tools'] categories = defaultdict(list)", "outdir + \"/\" + slugify(fname) + \".yml\" tool_yaml = {'tools': sorted(categories[cat], key=lambda x:", "in tools: categories[tool['tool_panel_section_label']].append(tool) for cat in categories: fname = str(cat) good_fname = outdir", "= defaultdict(list) for tool in tools: categories[tool['tool_panel_section_label']].append(tool) for cat in categories: fname =", "value = re.sub('[-\\s]+', '_', value) return value def main(): VERSION = 0.1 parser", "output directory to put the split files into. Defaults to infile without the", "defaultdict(list) for tool in tools: categories[tool['tool_panel_section_label']].append(tool) for cat in categories: fname = str(cat)", "files for each Section Label.\") parser.add_argument(\"-i\", \"--infile\", help=\"The returned `get_tool_list` yml file to", "server into individual files for each Section Label.\") parser.add_argument(\"-i\", \"--infile\", help=\"The returned `get_tool_list`", "%.1f\" % VERSION) return filename = args.infile a = yaml.safe_load(open(filename, 'r'), ) outdir", "Normalizes string, converts to lowercase, removes non-alpha characters, and converts spaces to hyphens.", "converts to lowercase, removes non-alpha characters, and converts spaces to hyphens. 
\"\"\" value", "file for a Galaxy server into individual files for each Section Label.\") parser.add_argument(\"-i\",", "print(\"split_tool_yml.py version: %.1f\" % VERSION) return filename = args.infile a = yaml.safe_load(open(filename, 'r'),", "outdir = re.sub('\\.yml', '', filename) if args.outdir: outdir = args.outdir if args.verbose: print('Outdir:", "= outdir + \"/\" + slugify(fname) + \".yml\" tool_yaml = {'tools': sorted(categories[cat], key=lambda", "to split.\") parser.add_argument(\"-o\", \"--outdir\", help=\"The output directory to put the split files into.", "\"--outdir\", help=\"The output directory to put the split files into. Defaults to infile", "\"\"\" Normalizes string, converts to lowercase, removes non-alpha characters, and converts spaces to", "re import os import argparse def slugify(value): \"\"\" Normalizes string, converts to lowercase,", "each Section Label.\") parser.add_argument(\"-i\", \"--infile\", help=\"The returned `get_tool_list` yml file to split.\") parser.add_argument(\"-o\",", "key=lambda x: x['name'] + x['owner'])} if args.verbose: print(\"Working on: %s\" % good_fname) with", "the split files into. Defaults to infile without the .yml.\") parser.add_argument(\"--version\", action='store_true') parser.add_argument(\"--verbose\",", "Section Label.\") parser.add_argument(\"-i\", \"--infile\", help=\"The returned `get_tool_list` yml file to split.\") parser.add_argument(\"-o\", \"--outdir\",", "split files into. Defaults to infile without the .yml.\") parser.add_argument(\"--version\", action='store_true') parser.add_argument(\"--verbose\", action='store_true')", "if args.outdir: outdir = args.outdir if args.verbose: print('Outdir: %s' % outdir) if not", "parser.add_argument(\"-o\", \"--outdir\", help=\"The output directory to put the split files into. Defaults to" ]
[ "# class TestHub(unittest.TestCase): # ''' # General tests for the hub.py file #", "hub # class TestHub(unittest.TestCase): # ''' # General tests for the hub.py file", "flag is set to true. # ''' # global XBEE_FLAG # XBEE_FLAG =", "sys # import unittest sys.path.insert(0, \"openpod/\") # import hub # class TestHub(unittest.TestCase): #", "class TestHub(unittest.TestCase): # ''' # General tests for the hub.py file # '''", "# ''' # global XBEE_FLAG # XBEE_FLAG = False # hub.incoming_xbee_data() # self.assertTrue(XBEE_FLAG)", "# def test_xbee_flag_set_true(self): # ''' # Check if the xbee flag is set", "test_xbee_flag_set_true(self): # ''' # Check if the xbee flag is set to true.", "hub.py file # ''' # def test_xbee_flag_set_true(self): # ''' # Check if the", "is set to true. # ''' # global XBEE_FLAG # XBEE_FLAG = False", "pod.py ''' import sys # import unittest sys.path.insert(0, \"openpod/\") # import hub #", "set to true. # ''' # global XBEE_FLAG # XBEE_FLAG = False #", "test for pod.py ''' import sys # import unittest sys.path.insert(0, \"openpod/\") # import", "import unittest sys.path.insert(0, \"openpod/\") # import hub # class TestHub(unittest.TestCase): # ''' #", "the hub.py file # ''' # def test_xbee_flag_set_true(self): # ''' # Check if", "# ''' # General tests for the hub.py file # ''' # def", "General tests for the hub.py file # ''' # def test_xbee_flag_set_true(self): # '''", "xbee flag is set to true. # ''' # global XBEE_FLAG # XBEE_FLAG", "TestHub(unittest.TestCase): # ''' # General tests for the hub.py file # ''' #", "Check if the xbee flag is set to true. # ''' # global", "''' # def test_xbee_flag_set_true(self): # ''' # Check if the xbee flag is", "# General tests for the hub.py file # ''' # def test_xbee_flag_set_true(self): #", "for the hub.py file # ''' # def test_xbee_flag_set_true(self): # ''' # Check", "''' # Check if the xbee flag is set to true. # '''", "''' import sys # import unittest sys.path.insert(0, \"openpod/\") # import hub # class", "''' # General tests for the hub.py file # ''' # def test_xbee_flag_set_true(self):", "tests for the hub.py file # ''' # def test_xbee_flag_set_true(self): # ''' #", "file # ''' # def test_xbee_flag_set_true(self): # ''' # Check if the xbee", "import sys # import unittest sys.path.insert(0, \"openpod/\") # import hub # class TestHub(unittest.TestCase):", "\"openpod/\") # import hub # class TestHub(unittest.TestCase): # ''' # General tests for", "for pod.py ''' import sys # import unittest sys.path.insert(0, \"openpod/\") # import hub", "# Check if the xbee flag is set to true. # ''' #", "to true. # ''' # global XBEE_FLAG # XBEE_FLAG = False # hub.incoming_xbee_data()", "if the xbee flag is set to true. # ''' # global XBEE_FLAG", "Unit test for pod.py ''' import sys # import unittest sys.path.insert(0, \"openpod/\") #", "# import hub # class TestHub(unittest.TestCase): # ''' # General tests for the", "# ''' # Check if the xbee flag is set to true. #", "the xbee flag is set to true. # ''' # global XBEE_FLAG #", "true. 
# ''' # global XBEE_FLAG # XBEE_FLAG = False # hub.incoming_xbee_data() #", "''' Unit test for pod.py ''' import sys # import unittest sys.path.insert(0, \"openpod/\")", "import hub # class TestHub(unittest.TestCase): # ''' # General tests for the hub.py", "def test_xbee_flag_set_true(self): # ''' # Check if the xbee flag is set to", "# ''' # def test_xbee_flag_set_true(self): # ''' # Check if the xbee flag", "sys.path.insert(0, \"openpod/\") # import hub # class TestHub(unittest.TestCase): # ''' # General tests", "unittest sys.path.insert(0, \"openpod/\") # import hub # class TestHub(unittest.TestCase): # ''' # General", "# import unittest sys.path.insert(0, \"openpod/\") # import hub # class TestHub(unittest.TestCase): # '''" ]
[ "I0, ..., In, CIN -> O, COUT # def FullCarry(k, expr6, expr5): assert", "if k >= 1: args += ['input I0', lut.I0] else: wire(1, lut.I0) if", "[ \"output O\", O, \"input CIN\", CIN, \"output COUT\", COUT ] return AnonymousCircuit(", ">= 1: args += ['input I0', lut.I0] else: wire(1, lut.I0) if k >=", "import * from ..spartan6.CLB import CARRY from ..spartan6.LUT import LUT5x2 __all__ = ['FullAdder']", "expr5): assert k <= 5 lut = LUT5x2(expr5, expr6) args = [] if", "4: args += ['input I3', lut.I3] else: wire(1, lut.I3) if k >= 5:", "O, COUT = CARRY(lut.O6, lut.O5, CIN) args += [ \"output O\", O, \"input", "* from ..spartan6.CLB import CARRY from ..spartan6.LUT import LUT5x2 __all__ = ['FullAdder'] #", "__all__ = ['FullAdder'] # # return I0, ..., In, CIN -> O, COUT", ">= 2: args += ['input I1', lut.I1] else: wire(1, lut.I1) if k >=", "In, CIN -> O, COUT # def FullCarry(k, expr6, expr5): assert k <=", "O, \"input CIN\", CIN, \"output COUT\", COUT ] return AnonymousCircuit( args ) def", "else: wire(1, lut.I0) if k >= 2: args += ['input I1', lut.I1] else:", "+= ['input I1', lut.I1] else: wire(1, lut.I1) if k >= 3: args +=", "LUT5x2 __all__ = ['FullAdder'] # # return I0, ..., In, CIN -> O,", "..., In, CIN -> O, COUT # def FullCarry(k, expr6, expr5): assert k", "k >= 5: args += ['input I4', lut.I4] else: wire(1, lut.I4) CIN =", "..spartan6.CLB import CARRY from ..spartan6.LUT import LUT5x2 __all__ = ['FullAdder'] # # return", "args += ['input I3', lut.I3] else: wire(1, lut.I3) if k >= 5: args", "import CARRY from ..spartan6.LUT import LUT5x2 __all__ = ['FullAdder'] # # return I0,", "wire(1, lut.I4) CIN = Bit() O, COUT = CARRY(lut.O6, lut.O5, CIN) args +=", ">= 4: args += ['input I3', lut.I3] else: wire(1, lut.I3) if k >=", "= ['FullAdder'] # # return I0, ..., In, CIN -> O, COUT #", ">= 3: args += ['input I2', lut.I2] else: wire(1, lut.I2) if k >=", "O\", O, \"input CIN\", CIN, \"output COUT\", COUT ] return AnonymousCircuit( args )", "I1', lut.I1] else: wire(1, lut.I1) if k >= 3: args += ['input I2',", "magma import * from ..spartan6.CLB import CARRY from ..spartan6.LUT import LUT5x2 __all__ =", "lut.I2) if k >= 4: args += ['input I3', lut.I3] else: wire(1, lut.I3)", "expr6) args = [] if k >= 1: args += ['input I0', lut.I0]", "3: args += ['input I2', lut.I2] else: wire(1, lut.I2) if k >= 4:", "I3', lut.I3] else: wire(1, lut.I3) if k >= 5: args += ['input I4',", "else: wire(1, lut.I4) CIN = Bit() O, COUT = CARRY(lut.O6, lut.O5, CIN) args", "<= 5 lut = LUT5x2(expr5, expr6) args = [] if k >= 1:", "lut = LUT5x2(expr5, expr6) args = [] if k >= 1: args +=", "k >= 3: args += ['input I2', lut.I2] else: wire(1, lut.I2) if k", "I4', lut.I4] else: wire(1, lut.I4) CIN = Bit() O, COUT = CARRY(lut.O6, lut.O5,", "# return I0, ..., In, CIN -> O, COUT # def FullCarry(k, expr6,", "I2', lut.I2] else: wire(1, lut.I2) if k >= 4: args += ['input I3',", "args += [ \"output O\", O, \"input CIN\", CIN, \"output COUT\", COUT ]", "5: args += ['input I4', lut.I4] else: wire(1, lut.I4) CIN = Bit() O,", "if k >= 2: args += ['input I1', lut.I1] else: wire(1, lut.I1) if", "args += ['input I1', lut.I1] else: wire(1, lut.I1) if k >= 3: args", "1: args += ['input I0', lut.I0] else: wire(1, lut.I0) if k >= 2:", "+= [ \"output O\", O, \"input CIN\", CIN, \"output COUT\", COUT ] return", "CIN = Bit() O, COUT = CARRY(lut.O6, lut.O5, CIN) args += [ \"output", "= CARRY(lut.O6, lut.O5, CIN) args += [ \"output O\", O, \"input CIN\", CIN,", "+= ['input I3', lut.I3] else: wire(1, lut.I3) if k >= 5: args +=", "2: args += ['input I1', lut.I1] 
else: wire(1, lut.I1) if k >= 3:", "k >= 2: args += ['input I1', lut.I1] else: wire(1, lut.I1) if k", "# def FullCarry(k, expr6, expr5): assert k <= 5 lut = LUT5x2(expr5, expr6)", "= LUT5x2(expr5, expr6) args = [] if k >= 1: args += ['input", "# # return I0, ..., In, CIN -> O, COUT # def FullCarry(k,", "O, COUT # def FullCarry(k, expr6, expr5): assert k <= 5 lut =", "lut.I4) CIN = Bit() O, COUT = CARRY(lut.O6, lut.O5, CIN) args += [", "-> O, COUT # def FullCarry(k, expr6, expr5): assert k <= 5 lut", "FullCarry(k, expr6, expr5): assert k <= 5 lut = LUT5x2(expr5, expr6) args =", "['input I3', lut.I3] else: wire(1, lut.I3) if k >= 5: args += ['input", "lut.I1) if k >= 3: args += ['input I2', lut.I2] else: wire(1, lut.I2)", "if k >= 5: args += ['input I4', lut.I4] else: wire(1, lut.I4) CIN", "k >= 4: args += ['input I3', lut.I3] else: wire(1, lut.I3) if k", "lut.I2] else: wire(1, lut.I2) if k >= 4: args += ['input I3', lut.I3]", "expr6, expr5): assert k <= 5 lut = LUT5x2(expr5, expr6) args = []", "args += ['input I4', lut.I4] else: wire(1, lut.I4) CIN = Bit() O, COUT", "assert k <= 5 lut = LUT5x2(expr5, expr6) args = [] if k", "from ..spartan6.LUT import LUT5x2 __all__ = ['FullAdder'] # # return I0, ..., In,", "args += ['input I0', lut.I0] else: wire(1, lut.I0) if k >= 2: args", "COUT = CARRY(lut.O6, lut.O5, CIN) args += [ \"output O\", O, \"input CIN\",", "return I0, ..., In, CIN -> O, COUT # def FullCarry(k, expr6, expr5):", "if k >= 3: args += ['input I2', lut.I2] else: wire(1, lut.I2) if", "+= ['input I4', lut.I4] else: wire(1, lut.I4) CIN = Bit() O, COUT =", "lut.I1] else: wire(1, lut.I1) if k >= 3: args += ['input I2', lut.I2]", "else: wire(1, lut.I1) if k >= 3: args += ['input I2', lut.I2] else:", "COUT # def FullCarry(k, expr6, expr5): assert k <= 5 lut = LUT5x2(expr5,", "k >= 1: args += ['input I0', lut.I0] else: wire(1, lut.I0) if k", "..spartan6.LUT import LUT5x2 __all__ = ['FullAdder'] # # return I0, ..., In, CIN", "wire(1, lut.I2) if k >= 4: args += ['input I3', lut.I3] else: wire(1,", "LUT5x2(expr5, expr6) args = [] if k >= 1: args += ['input I0',", "CIN) args += [ \"output O\", O, \"input CIN\", CIN, \"output COUT\", COUT", "lut.I3] else: wire(1, lut.I3) if k >= 5: args += ['input I4', lut.I4]", "import LUT5x2 __all__ = ['FullAdder'] # # return I0, ..., In, CIN ->", "= [] if k >= 1: args += ['input I0', lut.I0] else: wire(1,", "['input I1', lut.I1] else: wire(1, lut.I1) if k >= 3: args += ['input", "else: wire(1, lut.I3) if k >= 5: args += ['input I4', lut.I4] else:", "from ..spartan6.CLB import CARRY from ..spartan6.LUT import LUT5x2 __all__ = ['FullAdder'] # #", "lut.I0] else: wire(1, lut.I0) if k >= 2: args += ['input I1', lut.I1]", "if k >= 4: args += ['input I3', lut.I3] else: wire(1, lut.I3) if", "lut.O5, CIN) args += [ \"output O\", O, \"input CIN\", CIN, \"output COUT\",", "+= ['input I0', lut.I0] else: wire(1, lut.I0) if k >= 2: args +=", "lut.I4] else: wire(1, lut.I4) CIN = Bit() O, COUT = CARRY(lut.O6, lut.O5, CIN)", "lut.I3) if k >= 5: args += ['input I4', lut.I4] else: wire(1, lut.I4)", "CARRY from ..spartan6.LUT import LUT5x2 __all__ = ['FullAdder'] # # return I0, ...,", "args = [] if k >= 1: args += ['input I0', lut.I0] else:", ">= 5: args += ['input I4', lut.I4] else: wire(1, lut.I4) CIN = Bit()", "= Bit() O, COUT = CARRY(lut.O6, lut.O5, CIN) args += [ \"output O\",", "I0', lut.I0] else: wire(1, lut.I0) if k >= 2: args += ['input I1',", "from magma import * from ..spartan6.CLB import CARRY from ..spartan6.LUT import LUT5x2 __all__", "['input I0', lut.I0] else: 
wire(1, lut.I0) if k >= 2: args += ['input", "wire(1, lut.I3) if k >= 5: args += ['input I4', lut.I4] else: wire(1,", "Bit() O, COUT = CARRY(lut.O6, lut.O5, CIN) args += [ \"output O\", O,", "args += ['input I2', lut.I2] else: wire(1, lut.I2) if k >= 4: args", "['FullAdder'] # # return I0, ..., In, CIN -> O, COUT # def", "else: wire(1, lut.I2) if k >= 4: args += ['input I3', lut.I3] else:", "CIN -> O, COUT # def FullCarry(k, expr6, expr5): assert k <= 5", "k <= 5 lut = LUT5x2(expr5, expr6) args = [] if k >=", "+= ['input I2', lut.I2] else: wire(1, lut.I2) if k >= 4: args +=", "wire(1, lut.I1) if k >= 3: args += ['input I2', lut.I2] else: wire(1,", "['input I2', lut.I2] else: wire(1, lut.I2) if k >= 4: args += ['input", "\"input CIN\", CIN, \"output COUT\", COUT ] return AnonymousCircuit( args ) def FullAdder():", "['input I4', lut.I4] else: wire(1, lut.I4) CIN = Bit() O, COUT = CARRY(lut.O6,", "def FullCarry(k, expr6, expr5): assert k <= 5 lut = LUT5x2(expr5, expr6) args", "lut.I0) if k >= 2: args += ['input I1', lut.I1] else: wire(1, lut.I1)", "wire(1, lut.I0) if k >= 2: args += ['input I1', lut.I1] else: wire(1,", "CARRY(lut.O6, lut.O5, CIN) args += [ \"output O\", O, \"input CIN\", CIN, \"output", "\"output O\", O, \"input CIN\", CIN, \"output COUT\", COUT ] return AnonymousCircuit( args", "[] if k >= 1: args += ['input I0', lut.I0] else: wire(1, lut.I0)", "5 lut = LUT5x2(expr5, expr6) args = [] if k >= 1: args", "CIN\", CIN, \"output COUT\", COUT ] return AnonymousCircuit( args ) def FullAdder(): pass" ]
[ "DEBUG=True, ) from django.db import models class Setting(models.Model): key = models.CharField(max_length=32, unique=True) value", "settings.configured: settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'test.db' } }, INSTALLED_APPS=[ 'tests',", "{ 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'test.db' } }, INSTALLED_APPS=[ 'tests', ], DEBUG=True, ) from", "<filename>tests/models.py from django.conf import settings if not settings.configured: settings.configure( DATABASES={ 'default': { 'ENGINE':", "'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'test.db' } }, INSTALLED_APPS=[ 'tests', ], DEBUG=True, ) from django.db", "'test.db' } }, INSTALLED_APPS=[ 'tests', ], DEBUG=True, ) from django.db import models class", "}, INSTALLED_APPS=[ 'tests', ], DEBUG=True, ) from django.db import models class Setting(models.Model): key", "django.conf import settings if not settings.configured: settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME':", "INSTALLED_APPS=[ 'tests', ], DEBUG=True, ) from django.db import models class Setting(models.Model): key =", "django.db import models class Setting(models.Model): key = models.CharField(max_length=32, unique=True) value = models.CharField(max_length=32, default='')", "from django.conf import settings if not settings.configured: settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3',", "'django.db.backends.sqlite3', 'NAME': 'test.db' } }, INSTALLED_APPS=[ 'tests', ], DEBUG=True, ) from django.db import", "not settings.configured: settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'test.db' } }, INSTALLED_APPS=[", "DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'test.db' } }, INSTALLED_APPS=[ 'tests', ], DEBUG=True,", "], DEBUG=True, ) from django.db import models class Setting(models.Model): key = models.CharField(max_length=32, unique=True)", "'tests', ], DEBUG=True, ) from django.db import models class Setting(models.Model): key = models.CharField(max_length=32,", "} }, INSTALLED_APPS=[ 'tests', ], DEBUG=True, ) from django.db import models class Setting(models.Model):", "settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'test.db' } }, INSTALLED_APPS=[ 'tests', ],", "settings if not settings.configured: settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'test.db' }", "if not settings.configured: settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'test.db' } },", ") from django.db import models class Setting(models.Model): key = models.CharField(max_length=32, unique=True) value =", "'NAME': 'test.db' } }, INSTALLED_APPS=[ 'tests', ], DEBUG=True, ) from django.db import models", "import settings if not settings.configured: settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'test.db'", "'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'test.db' } }, INSTALLED_APPS=[ 'tests', ], DEBUG=True, )", "from django.db import models class Setting(models.Model): key = models.CharField(max_length=32, unique=True) value = models.CharField(max_length=32," ]
[ "EMatch.PERFECT elif isinstance(val, self._class()):#and val.__javaobject__.getClass() == self._class(): return EMatch.PERFECT return EMatch.NONE def toJava(self,", "pdescr, args, pos, val): if val is None: args.setString(pos, None) elif isinstance(val, str):", "# Licensed under the MIT License # https://opensource.org/licenses/MIT from jvm.lib import public from", "str): fld.setString(this, val) elif isinstance(val, self._class()): fld.setObject(this, val.__javaobject__) else: raise TypeError(\"Cannot convert value", "args.setString(pos, None) elif isinstance(val, str): args.setString(pos, val) elif isinstance(val, self._class()): args.setObject(pos, val.__javaobject__) else:", ".._constants import EMatch from .._jvm import JVM from ._base_handler import _ObjectHandler @public class", "return None else: if isinstance(val, self._jt_jvm.JObject): val = val.stringValue() return val def getStatic(self,", "fld.setStaticString(cls, None) elif isinstance(val, str): fld.setStaticString(cls, val) elif isinstance(val, self._class()): fld.setStaticObject(cls, val.__javaobject__) else:", "getInstance(self, fld, this): return fld.getString(this) def setInstance(self, fld, this, val): if val is", "string\") def toPython(self, val): if val is None: return None else: if isinstance(val,", "None: return None elif isinstance(val, str): return self._jt_jvm.JObject.newString(val) elif isinstance(val, self._class()):#and val.__javaobject__.getClass() ==", "fld.getString(this) def setInstance(self, fld, this, val): if val is None: fld.setString(this, None) elif", "def toJava(self, val): if val is None: return None elif isinstance(val, str): return", "to Java string\") def getInstance(self, fld, this): return fld.getString(this) def setInstance(self, fld, this,", "meth, cls, args): value = meth.callStaticString(cls, args) return value def callInstance(self, meth, this,", "__slots__ = () def __init__(self, state): super().__init__(state, EJavaType.STRING, JVM.jvm.JClass.getStringClass()) @cached def _class(self): return", "this, val): if val is None: fld.setString(this, None) elif isinstance(val, str): fld.setString(this, val)", "isinstance(val, str): fld.setString(this, val) elif isinstance(val, self._class()): fld.setObject(this, val.__javaobject__) else: raise TypeError(\"Cannot convert", "EMatch.IMPLICIT elif isinstance(val, str): return EMatch.PERFECT elif isinstance(val, self._class()):#and val.__javaobject__.getClass() == self._class(): return", "isinstance(val, str): return EMatch.PERFECT elif isinstance(val, self._class()):#and val.__javaobject__.getClass() == self._class(): return EMatch.PERFECT return", "return self._jt_jvm.JObject.newString(val) elif isinstance(val, self._class()):#and val.__javaobject__.getClass() == self._class(): return val.__javaobject__ raise TypeError(\"Cannot convert", "args.setObject(pos, val.__javaobject__) else: raise TypeError(\"Cannot convert value to Java string\") def callStatic(self, meth,", "def match(self, val): if val is None: return EMatch.IMPLICIT elif isinstance(val, str): return", "val.__javaobject__) else: raise TypeError(\"Cannot convert value to Java string\") def callStatic(self, meth, cls,", "is None: return None else: if isinstance(val, self._jt_jvm.JObject): val = val.stringValue() return val", "import public from jvm.lib import cached from .._constants import EJavaType from .._constants import", "val): if val is None: fld.setString(this, None) elif isinstance(val, str): fld.setString(this, val) elif", "value to Java string\") def callStatic(self, meth, 
cls, args): value = meth.callStaticString(cls, args)", "cls, args): value = meth.callStaticString(cls, args) return value def callInstance(self, meth, this, args):", "TypeError(\"Cannot convert value to Java string\") def toPython(self, val): if val is None:", "isinstance(val, self._class()): fld.setStaticObject(cls, val.__javaobject__) else: raise TypeError(\"Cannot convert value to Java string\") def", "from .._constants import EJavaType from .._constants import EMatch from .._jvm import JVM from", "elif isinstance(val, self._class()): fld.setObject(this, val.__javaobject__) else: raise TypeError(\"Cannot convert value to Java string\")", "elif isinstance(val, str): return EMatch.PERFECT elif isinstance(val, self._class()):#and val.__javaobject__.getClass() == self._class(): return EMatch.PERFECT", "if val is None: return None elif isinstance(val, str): return self._jt_jvm.JObject.newString(val) elif isinstance(val,", "isinstance(val, self._class()):#and val.__javaobject__.getClass() == self._class(): return val.__javaobject__ raise TypeError(\"Cannot convert value to Java", "value = meth.callStaticString(cls, args) return value def callInstance(self, meth, this, args): value =", "EMatch from .._jvm import JVM from ._base_handler import _ObjectHandler @public class StringHandler(_ObjectHandler): __slots__", "def setArgument(self, pdescr, args, pos, val): if val is None: args.setString(pos, None) elif", "TypeError(\"Cannot convert value to Java string\") def callStatic(self, meth, cls, args): value =", "TypeError(\"Cannot convert value to Java string\") def setArgument(self, pdescr, args, pos, val): if", "JVM from ._base_handler import _ObjectHandler @public class StringHandler(_ObjectHandler): __slots__ = () def __init__(self,", "val.stringValue() return val def getStatic(self, fld, cls): return fld.getStaticString(cls) def setStatic(self, fld, cls,", "None: return None else: if isinstance(val, self._jt_jvm.JObject): val = val.stringValue() return val def", "return fld.getStaticString(cls) def setStatic(self, fld, cls, val): if val is None: fld.setStaticString(cls, None)", "self._class(): return EMatch.PERFECT return EMatch.NONE def toJava(self, val): if val is None: return", "val) elif isinstance(val, self._class()): fld.setStaticObject(cls, val.__javaobject__) else: raise TypeError(\"Cannot convert value to Java", "EMatch.PERFECT return EMatch.NONE def toJava(self, val): if val is None: return None elif", "() def __init__(self, state): super().__init__(state, EJavaType.STRING, JVM.jvm.JClass.getStringClass()) @cached def _class(self): return self._state.class_importer.java_lang_String def", "return EMatch.PERFECT return EMatch.NONE def toJava(self, val): if val is None: return None", "match(self, val): if val is None: return EMatch.IMPLICIT elif isinstance(val, str): return EMatch.PERFECT", "import EMatch from .._jvm import JVM from ._base_handler import _ObjectHandler @public class StringHandler(_ObjectHandler):", "self._class(): return val.__javaobject__ raise TypeError(\"Cannot convert value to Java string\") def toPython(self, val):", "None else: if isinstance(val, self._jt_jvm.JObject): val = val.stringValue() return val def getStatic(self, fld,", "val is None: args.setString(pos, None) elif isinstance(val, str): args.setString(pos, val) elif isinstance(val, self._class()):", "to Java string\") def toPython(self, val): if val is None: return None else:", "._base_handler import _ObjectHandler @public class StringHandler(_ObjectHandler): __slots__ = () def __init__(self, state): 
super().__init__(state,", "setInstance(self, fld, this, val): if val is None: fld.setString(this, None) elif isinstance(val, str):", "Licensed under the MIT License # https://opensource.org/licenses/MIT from jvm.lib import public from jvm.lib", "EJavaType from .._constants import EMatch from .._jvm import JVM from ._base_handler import _ObjectHandler", "raise TypeError(\"Cannot convert value to Java string\") def getInstance(self, fld, this): return fld.getString(this)", "self._class()): args.setObject(pos, val.__javaobject__) else: raise TypeError(\"Cannot convert value to Java string\") def callStatic(self,", "# Copyright (c) 2015-2022 <NAME> # Licensed under the MIT License # https://opensource.org/licenses/MIT", "Copyright (c) 2015-2022 <NAME> # Licensed under the MIT License # https://opensource.org/licenses/MIT from", "elif isinstance(val, str): fld.setStaticString(cls, val) elif isinstance(val, self._class()): fld.setStaticObject(cls, val.__javaobject__) else: raise TypeError(\"Cannot", "string\") def getInstance(self, fld, this): return fld.getString(this) def setInstance(self, fld, this, val): if", "val is None: return None else: if isinstance(val, self._jt_jvm.JObject): val = val.stringValue() return", "= val.stringValue() return val def getStatic(self, fld, cls): return fld.getStaticString(cls) def setStatic(self, fld,", "args, pos, val): if val is None: args.setString(pos, None) elif isinstance(val, str): args.setString(pos,", "toPython(self, val): if val is None: return None else: if isinstance(val, self._jt_jvm.JObject): val", "= () def __init__(self, state): super().__init__(state, EJavaType.STRING, JVM.jvm.JClass.getStringClass()) @cached def _class(self): return self._state.class_importer.java_lang_String", "jvm.lib import public from jvm.lib import cached from .._constants import EJavaType from .._constants", "return EMatch.IMPLICIT elif isinstance(val, str): return EMatch.PERFECT elif isinstance(val, self._class()):#and val.__javaobject__.getClass() == self._class():", "value to Java string\") def toPython(self, val): if val is None: return None", "None: fld.setStaticString(cls, None) elif isinstance(val, str): fld.setStaticString(cls, val) elif isinstance(val, self._class()): fld.setStaticObject(cls, val.__javaobject__)", "__init__(self, state): super().__init__(state, EJavaType.STRING, JVM.jvm.JClass.getStringClass()) @cached def _class(self): return self._state.class_importer.java_lang_String def match(self, val):", "state): super().__init__(state, EJavaType.STRING, JVM.jvm.JClass.getStringClass()) @cached def _class(self): return self._state.class_importer.java_lang_String def match(self, val): if", "return val.__javaobject__ raise TypeError(\"Cannot convert value to Java string\") def toPython(self, val): if", "cls): return fld.getStaticString(cls) def setStatic(self, fld, cls, val): if val is None: fld.setStaticString(cls,", "string\") def setArgument(self, pdescr, args, pos, val): if val is None: args.setString(pos, None)", "def callStatic(self, meth, cls, args): value = meth.callStaticString(cls, args) return value def callInstance(self,", "convert value to Java string\") def toPython(self, val): if val is None: return", "fld, cls): return fld.getStaticString(cls) def setStatic(self, fld, cls, val): if val is None:", "if val is None: fld.setStaticString(cls, None) elif isinstance(val, str): fld.setStaticString(cls, val) elif isinstance(val,", "@cached def _class(self): return self._state.class_importer.java_lang_String def match(self, val): if val is None: 
return", "Java string\") def callStatic(self, meth, cls, args): value = meth.callStaticString(cls, args) return value", "val) elif isinstance(val, self._class()): fld.setObject(this, val.__javaobject__) else: raise TypeError(\"Cannot convert value to Java", "None) elif isinstance(val, str): fld.setString(this, val) elif isinstance(val, self._class()): fld.setObject(this, val.__javaobject__) else: raise", "fld.getStaticString(cls) def setStatic(self, fld, cls, val): if val is None: fld.setStaticString(cls, None) elif", "is None: return EMatch.IMPLICIT elif isinstance(val, str): return EMatch.PERFECT elif isinstance(val, self._class()):#and val.__javaobject__.getClass()", "val def getStatic(self, fld, cls): return fld.getStaticString(cls) def setStatic(self, fld, cls, val): if", "JVM.jvm.JClass.getStringClass()) @cached def _class(self): return self._state.class_importer.java_lang_String def match(self, val): if val is None:", "convert value to Java string\") def callStatic(self, meth, cls, args): value = meth.callStaticString(cls,", "from jvm.lib import cached from .._constants import EJavaType from .._constants import EMatch from", "import JVM from ._base_handler import _ObjectHandler @public class StringHandler(_ObjectHandler): __slots__ = () def", "args) return value def callInstance(self, meth, this, args): value = meth.callInstanceString(this, args) return", "val is None: fld.setString(this, None) elif isinstance(val, str): fld.setString(this, val) elif isinstance(val, self._class()):", "return value def callInstance(self, meth, this, args): value = meth.callInstanceString(this, args) return value", "raise TypeError(\"Cannot convert value to Java string\") def callStatic(self, meth, cls, args): value", "https://opensource.org/licenses/MIT from jvm.lib import public from jvm.lib import cached from .._constants import EJavaType", "val): if val is None: return None else: if isinstance(val, self._jt_jvm.JObject): val =", "val.__javaobject__ raise TypeError(\"Cannot convert value to Java string\") def toPython(self, val): if val", "return None elif isinstance(val, str): return self._jt_jvm.JObject.newString(val) elif isinstance(val, self._class()):#and val.__javaobject__.getClass() == self._class():", "@public class StringHandler(_ObjectHandler): __slots__ = () def __init__(self, state): super().__init__(state, EJavaType.STRING, JVM.jvm.JClass.getStringClass()) @cached", "meth.callStaticString(cls, args) return value def callInstance(self, meth, this, args): value = meth.callInstanceString(this, args)", "val): if val is None: return EMatch.IMPLICIT elif isinstance(val, str): return EMatch.PERFECT elif", "fld.setObject(this, val.__javaobject__) else: raise TypeError(\"Cannot convert value to Java string\") def setArgument(self, pdescr,", "return self._state.class_importer.java_lang_String def match(self, val): if val is None: return EMatch.IMPLICIT elif isinstance(val,", "isinstance(val, self._class()): args.setObject(pos, val.__javaobject__) else: raise TypeError(\"Cannot convert value to Java string\") def", "= meth.callStaticString(cls, args) return value def callInstance(self, meth, this, args): value = meth.callInstanceString(this,", "def getInstance(self, fld, this): return fld.getString(this) def setInstance(self, fld, this, val): if val", "== self._class(): return EMatch.PERFECT return EMatch.NONE def toJava(self, val): if val is None:", "public from jvm.lib import cached from .._constants import EJavaType from .._constants import EMatch", "else: raise TypeError(\"Cannot convert 
value to Java string\") def setArgument(self, pdescr, args, pos,", "fld, this): return fld.getString(this) def setInstance(self, fld, this, val): if val is None:", "None) elif isinstance(val, str): fld.setStaticString(cls, val) elif isinstance(val, self._class()): fld.setStaticObject(cls, val.__javaobject__) else: raise", "fld, this, val): if val is None: fld.setString(this, None) elif isinstance(val, str): fld.setString(this,", "from jvm.lib import public from jvm.lib import cached from .._constants import EJavaType from", "from .._jvm import JVM from ._base_handler import _ObjectHandler @public class StringHandler(_ObjectHandler): __slots__ =", "else: if isinstance(val, self._jt_jvm.JObject): val = val.stringValue() return val def getStatic(self, fld, cls):", "from ._base_handler import _ObjectHandler @public class StringHandler(_ObjectHandler): __slots__ = () def __init__(self, state):", "string\") def callStatic(self, meth, cls, args): value = meth.callStaticString(cls, args) return value def", "fld, cls, val): if val is None: fld.setStaticString(cls, None) elif isinstance(val, str): fld.setStaticString(cls,", "elif isinstance(val, str): return self._jt_jvm.JObject.newString(val) elif isinstance(val, self._class()):#and val.__javaobject__.getClass() == self._class(): return val.__javaobject__", "def _class(self): return self._state.class_importer.java_lang_String def match(self, val): if val is None: return EMatch.IMPLICIT", "is None: return None elif isinstance(val, str): return self._jt_jvm.JObject.newString(val) elif isinstance(val, self._class()):#and val.__javaobject__.getClass()", "def __init__(self, state): super().__init__(state, EJavaType.STRING, JVM.jvm.JClass.getStringClass()) @cached def _class(self): return self._state.class_importer.java_lang_String def match(self,", "callStatic(self, meth, cls, args): value = meth.callStaticString(cls, args) return value def callInstance(self, meth,", "cls, val): if val is None: fld.setStaticString(cls, None) elif isinstance(val, str): fld.setStaticString(cls, val)", "else: raise TypeError(\"Cannot convert value to Java string\") def callStatic(self, meth, cls, args):", "convert value to Java string\") def setArgument(self, pdescr, args, pos, val): if val", "str): return EMatch.PERFECT elif isinstance(val, self._class()):#and val.__javaobject__.getClass() == self._class(): return EMatch.PERFECT return EMatch.NONE", ".._constants import EJavaType from .._constants import EMatch from .._jvm import JVM from ._base_handler", "convert value to Java string\") def getInstance(self, fld, this): return fld.getString(this) def setInstance(self,", "elif isinstance(val, str): fld.setString(this, val) elif isinstance(val, self._class()): fld.setObject(this, val.__javaobject__) else: raise TypeError(\"Cannot", "Java string\") def toPython(self, val): if val is None: return None else: if", "None: args.setString(pos, None) elif isinstance(val, str): args.setString(pos, val) elif isinstance(val, self._class()): args.setObject(pos, val.__javaobject__)", "import _ObjectHandler @public class StringHandler(_ObjectHandler): __slots__ = () def __init__(self, state): super().__init__(state, EJavaType.STRING,", "from .._constants import EMatch from .._jvm import JVM from ._base_handler import _ObjectHandler @public", "args.setString(pos, val) elif isinstance(val, self._class()): args.setObject(pos, val.__javaobject__) else: raise TypeError(\"Cannot convert value to", "if val is None: fld.setString(this, None) elif isinstance(val, str): fld.setString(this, val) 
elif isinstance(val,", "val.__javaobject__) else: raise TypeError(\"Cannot convert value to Java string\") def setArgument(self, pdescr, args,", "str): fld.setStaticString(cls, val) elif isinstance(val, self._class()): fld.setStaticObject(cls, val.__javaobject__) else: raise TypeError(\"Cannot convert value", "str): return self._jt_jvm.JObject.newString(val) elif isinstance(val, self._class()):#and val.__javaobject__.getClass() == self._class(): return val.__javaobject__ raise TypeError(\"Cannot", "if val is None: return EMatch.IMPLICIT elif isinstance(val, str): return EMatch.PERFECT elif isinstance(val,", "the MIT License # https://opensource.org/licenses/MIT from jvm.lib import public from jvm.lib import cached", "value to Java string\") def setArgument(self, pdescr, args, pos, val): if val is", "pos, val): if val is None: args.setString(pos, None) elif isinstance(val, str): args.setString(pos, val)", "None) elif isinstance(val, str): args.setString(pos, val) elif isinstance(val, self._class()): args.setObject(pos, val.__javaobject__) else: raise", "self._state.class_importer.java_lang_String def match(self, val): if val is None: return EMatch.IMPLICIT elif isinstance(val, str):", "self._jt_jvm.JObject.newString(val) elif isinstance(val, self._class()):#and val.__javaobject__.getClass() == self._class(): return val.__javaobject__ raise TypeError(\"Cannot convert value", "isinstance(val, str): fld.setStaticString(cls, val) elif isinstance(val, self._class()): fld.setStaticObject(cls, val.__javaobject__) else: raise TypeError(\"Cannot convert", "fld.setString(this, None) elif isinstance(val, str): fld.setString(this, val) elif isinstance(val, self._class()): fld.setObject(this, val.__javaobject__) else:", "EMatch.NONE def toJava(self, val): if val is None: return None elif isinstance(val, str):", "== self._class(): return val.__javaobject__ raise TypeError(\"Cannot convert value to Java string\") def toPython(self,", "is None: fld.setString(this, None) elif isinstance(val, str): fld.setString(this, val) elif isinstance(val, self._class()): fld.setObject(this,", "TypeError(\"Cannot convert value to Java string\") def getInstance(self, fld, this): return fld.getString(this) def", "self._class()):#and val.__javaobject__.getClass() == self._class(): return EMatch.PERFECT return EMatch.NONE def toJava(self, val): if val", "is None: fld.setStaticString(cls, None) elif isinstance(val, str): fld.setStaticString(cls, val) elif isinstance(val, self._class()): fld.setStaticObject(cls,", "def getStatic(self, fld, cls): return fld.getStaticString(cls) def setStatic(self, fld, cls, val): if val", "def setInstance(self, fld, this, val): if val is None: fld.setString(this, None) elif isinstance(val,", "raise TypeError(\"Cannot convert value to Java string\") def toPython(self, val): if val is", "None: fld.setString(this, None) elif isinstance(val, str): fld.setString(this, val) elif isinstance(val, self._class()): fld.setObject(this, val.__javaobject__)", "if val is None: return None else: if isinstance(val, self._jt_jvm.JObject): val = val.stringValue()", "elif isinstance(val, str): args.setString(pos, val) elif isinstance(val, self._class()): args.setObject(pos, val.__javaobject__) else: raise TypeError(\"Cannot", "to Java string\") def callStatic(self, meth, cls, args): value = meth.callStaticString(cls, args) return", "self._class()): fld.setObject(this, val.__javaobject__) else: raise TypeError(\"Cannot convert value to Java string\") def setArgument(self,", "self._class()):#and 
val.__javaobject__.getClass() == self._class(): return val.__javaobject__ raise TypeError(\"Cannot convert value to Java string\")", "args): value = meth.callStaticString(cls, args) return value def callInstance(self, meth, this, args): value", "setStatic(self, fld, cls, val): if val is None: fld.setStaticString(cls, None) elif isinstance(val, str):", "def toPython(self, val): if val is None: return None else: if isinstance(val, self._jt_jvm.JObject):", "Java string\") def getInstance(self, fld, this): return fld.getString(this) def setInstance(self, fld, this, val):", "val is None: return None elif isinstance(val, str): return self._jt_jvm.JObject.newString(val) elif isinstance(val, self._class()):#and", "val): if val is None: return None elif isinstance(val, str): return self._jt_jvm.JObject.newString(val) elif", "def setStatic(self, fld, cls, val): if val is None: fld.setStaticString(cls, None) elif isinstance(val,", "self._class()): fld.setStaticObject(cls, val.__javaobject__) else: raise TypeError(\"Cannot convert value to Java string\") def getInstance(self,", "val) elif isinstance(val, self._class()): args.setObject(pos, val.__javaobject__) else: raise TypeError(\"Cannot convert value to Java", "val): if val is None: fld.setStaticString(cls, None) elif isinstance(val, str): fld.setStaticString(cls, val) elif", "isinstance(val, self._class()): fld.setObject(this, val.__javaobject__) else: raise TypeError(\"Cannot convert value to Java string\") def", "import EJavaType from .._constants import EMatch from .._jvm import JVM from ._base_handler import", "isinstance(val, self._jt_jvm.JObject): val = val.stringValue() return val def getStatic(self, fld, cls): return fld.getStaticString(cls)", "val = val.stringValue() return val def getStatic(self, fld, cls): return fld.getStaticString(cls) def setStatic(self,", "# https://opensource.org/licenses/MIT from jvm.lib import public from jvm.lib import cached from .._constants import", "if val is None: args.setString(pos, None) elif isinstance(val, str): args.setString(pos, val) elif isinstance(val,", "_ObjectHandler @public class StringHandler(_ObjectHandler): __slots__ = () def __init__(self, state): super().__init__(state, EJavaType.STRING, JVM.jvm.JClass.getStringClass())", ".._jvm import JVM from ._base_handler import _ObjectHandler @public class StringHandler(_ObjectHandler): __slots__ = ()", "StringHandler(_ObjectHandler): __slots__ = () def __init__(self, state): super().__init__(state, EJavaType.STRING, JVM.jvm.JClass.getStringClass()) @cached def _class(self):", "fld.setStaticObject(cls, val.__javaobject__) else: raise TypeError(\"Cannot convert value to Java string\") def getInstance(self, fld,", "import cached from .._constants import EJavaType from .._constants import EMatch from .._jvm import", "isinstance(val, str): return self._jt_jvm.JObject.newString(val) elif isinstance(val, self._class()):#and val.__javaobject__.getClass() == self._class(): return val.__javaobject__ raise", "under the MIT License # https://opensource.org/licenses/MIT from jvm.lib import public from jvm.lib import", "return val def getStatic(self, fld, cls): return fld.getStaticString(cls) def setStatic(self, fld, cls, val):", "setArgument(self, pdescr, args, pos, val): if val is None: args.setString(pos, None) elif isinstance(val,", "fld.setStaticString(cls, val) elif isinstance(val, self._class()): fld.setStaticObject(cls, val.__javaobject__) else: raise TypeError(\"Cannot convert value to", "Java string\") def setArgument(self, pdescr, args, pos, val): if 
val is None: args.setString(pos,", "val is None: return EMatch.IMPLICIT elif isinstance(val, str): return EMatch.PERFECT elif isinstance(val, self._class()):#and", "val.__javaobject__.getClass() == self._class(): return EMatch.PERFECT return EMatch.NONE def toJava(self, val): if val is", "toJava(self, val): if val is None: return None elif isinstance(val, str): return self._jt_jvm.JObject.newString(val)", "License # https://opensource.org/licenses/MIT from jvm.lib import public from jvm.lib import cached from .._constants", "return fld.getString(this) def setInstance(self, fld, this, val): if val is None: fld.setString(this, None)", "elif isinstance(val, self._class()):#and val.__javaobject__.getClass() == self._class(): return val.__javaobject__ raise TypeError(\"Cannot convert value to", "_class(self): return self._state.class_importer.java_lang_String def match(self, val): if val is None: return EMatch.IMPLICIT elif", "MIT License # https://opensource.org/licenses/MIT from jvm.lib import public from jvm.lib import cached from", "2015-2022 <NAME> # Licensed under the MIT License # https://opensource.org/licenses/MIT from jvm.lib import", "to Java string\") def setArgument(self, pdescr, args, pos, val): if val is None:", "elif isinstance(val, self._class()): args.setObject(pos, val.__javaobject__) else: raise TypeError(\"Cannot convert value to Java string\")", "else: raise TypeError(\"Cannot convert value to Java string\") def getInstance(self, fld, this): return", "isinstance(val, str): args.setString(pos, val) elif isinstance(val, self._class()): args.setObject(pos, val.__javaobject__) else: raise TypeError(\"Cannot convert", "return EMatch.NONE def toJava(self, val): if val is None: return None elif isinstance(val,", "fld.setString(this, val) elif isinstance(val, self._class()): fld.setObject(this, val.__javaobject__) else: raise TypeError(\"Cannot convert value to", "val is None: fld.setStaticString(cls, None) elif isinstance(val, str): fld.setStaticString(cls, val) elif isinstance(val, self._class()):", "jvm.lib import cached from .._constants import EJavaType from .._constants import EMatch from .._jvm", "return EMatch.PERFECT elif isinstance(val, self._class()):#and val.__javaobject__.getClass() == self._class(): return EMatch.PERFECT return EMatch.NONE def", "isinstance(val, self._class()):#and val.__javaobject__.getClass() == self._class(): return EMatch.PERFECT return EMatch.NONE def toJava(self, val): if", "str): args.setString(pos, val) elif isinstance(val, self._class()): args.setObject(pos, val.__javaobject__) else: raise TypeError(\"Cannot convert value", "class StringHandler(_ObjectHandler): __slots__ = () def __init__(self, state): super().__init__(state, EJavaType.STRING, JVM.jvm.JClass.getStringClass()) @cached def", "(c) 2015-2022 <NAME> # Licensed under the MIT License # https://opensource.org/licenses/MIT from jvm.lib", "None: return EMatch.IMPLICIT elif isinstance(val, str): return EMatch.PERFECT elif isinstance(val, self._class()):#and val.__javaobject__.getClass() ==", "val): if val is None: args.setString(pos, None) elif isinstance(val, str): args.setString(pos, val) elif", "this): return fld.getString(this) def setInstance(self, fld, this, val): if val is None: fld.setString(this,", "EJavaType.STRING, JVM.jvm.JClass.getStringClass()) @cached def _class(self): return self._state.class_importer.java_lang_String def match(self, val): if val is", "getStatic(self, fld, cls): return fld.getStaticString(cls) def setStatic(self, fld, cls, val): if val is", "raise 
TypeError(\"Cannot convert value to Java string\") def setArgument(self, pdescr, args, pos, val):", "cached from .._constants import EJavaType from .._constants import EMatch from .._jvm import JVM", "None elif isinstance(val, str): return self._jt_jvm.JObject.newString(val) elif isinstance(val, self._class()):#and val.__javaobject__.getClass() == self._class(): return", "val.__javaobject__.getClass() == self._class(): return val.__javaobject__ raise TypeError(\"Cannot convert value to Java string\") def", "elif isinstance(val, self._class()): fld.setStaticObject(cls, val.__javaobject__) else: raise TypeError(\"Cannot convert value to Java string\")", "if isinstance(val, self._jt_jvm.JObject): val = val.stringValue() return val def getStatic(self, fld, cls): return", "is None: args.setString(pos, None) elif isinstance(val, str): args.setString(pos, val) elif isinstance(val, self._class()): args.setObject(pos,", "value to Java string\") def getInstance(self, fld, this): return fld.getString(this) def setInstance(self, fld,", "super().__init__(state, EJavaType.STRING, JVM.jvm.JClass.getStringClass()) @cached def _class(self): return self._state.class_importer.java_lang_String def match(self, val): if val", "<NAME> # Licensed under the MIT License # https://opensource.org/licenses/MIT from jvm.lib import public", "val.__javaobject__) else: raise TypeError(\"Cannot convert value to Java string\") def getInstance(self, fld, this):", "self._jt_jvm.JObject): val = val.stringValue() return val def getStatic(self, fld, cls): return fld.getStaticString(cls) def", "elif isinstance(val, self._class()):#and val.__javaobject__.getClass() == self._class(): return EMatch.PERFECT return EMatch.NONE def toJava(self, val):" ]
[ "countries_list = self.sess.query(Countries).all() resp.media = { \"message\": \"Helloorld!\", \"country\": [_country.repr for _country in", "settings from app.model import * def set_cache(key, value): rclient = redis.Redis.from_url(settings.get('REDIS_URL')) rclient.set(key, json.dumps(value))", "rclient = redis.Redis.from_url(settings.get('REDIS_URL')) rclient.set(key, json.dumps(value)) def get_cache(key): rclient = redis.Redis.from_url(settings.get('REDIS_URL')) return rclient.get(key) class", "import redis from app.config import settings from app.model import * def set_cache(key, value):", "req.params.get('name'): self.sess.add(Countries( country_id= uuid.uuid4(), country_name= req.params.get('name') )) self.sess.commit() countries_list = self.sess.query(Countries).all() resp.media =", "= { \"message\": \"Helloorld!\", \"country\": [_country.repr for _country in countries_list] } class RootNameResources:", "from app.config import settings from app.model import * def set_cache(key, value): rclient =", "on_get(self, req, resp): if req.params.get('name'): self.sess.add(Countries( country_id= uuid.uuid4(), country_name= req.params.get('name') )) self.sess.commit() countries_list", "* def set_cache(key, value): rclient = redis.Redis.from_url(settings.get('REDIS_URL')) rclient.set(key, json.dumps(value)) def get_cache(key): rclient =", "uuid.uuid4(), country_name= req.params.get('name') )) self.sess.commit() countries_list = self.sess.query(Countries).all() resp.media = { \"message\": \"Helloorld!\",", "return rclient.get(key) class RootResources: def on_get(self, req, resp): if req.params.get('name'): self.sess.add(Countries( country_id= uuid.uuid4(),", "_country in countries_list] } class RootNameResources: def on_post(self, req, resp, name): resp.media =", "class RootNameResources: def on_post(self, req, resp, name): resp.media = { \"message\": \"Hello, {}!\".format(name.capitalize())", "= self.sess.query(Countries).all() resp.media = { \"message\": \"Helloorld!\", \"country\": [_country.repr for _country in countries_list]", "import uuid import json import redis from app.config import settings from app.model import", "in countries_list] } class RootNameResources: def on_post(self, req, resp, name): resp.media = {", "req, resp): if req.params.get('name'): self.sess.add(Countries( country_id= uuid.uuid4(), country_name= req.params.get('name') )) self.sess.commit() countries_list =", "import json import redis from app.config import settings from app.model import * def", "self.sess.add(Countries( country_id= uuid.uuid4(), country_name= req.params.get('name') )) self.sess.commit() countries_list = self.sess.query(Countries).all() resp.media = {", "{ \"message\": \"Helloorld!\", \"country\": [_country.repr for _country in countries_list] } class RootNameResources: def", "os import uuid import json import redis from app.config import settings from app.model", "get_cache(key): rclient = redis.Redis.from_url(settings.get('REDIS_URL')) return rclient.get(key) class RootResources: def on_get(self, req, resp): if", "def get_cache(key): rclient = redis.Redis.from_url(settings.get('REDIS_URL')) return rclient.get(key) class RootResources: def on_get(self, req, resp):", "redis.Redis.from_url(settings.get('REDIS_URL')) rclient.set(key, json.dumps(value)) def get_cache(key): rclient = redis.Redis.from_url(settings.get('REDIS_URL')) return rclient.get(key) class RootResources: def", "self.sess.query(Countries).all() resp.media = { \"message\": \"Helloorld!\", \"country\": [_country.repr for _country in 
countries_list] }", "json.dumps(value)) def get_cache(key): rclient = redis.Redis.from_url(settings.get('REDIS_URL')) return rclient.get(key) class RootResources: def on_get(self, req,", "= redis.Redis.from_url(settings.get('REDIS_URL')) return rclient.get(key) class RootResources: def on_get(self, req, resp): if req.params.get('name'): self.sess.add(Countries(", "} class RootNameResources: def on_post(self, req, resp, name): resp.media = { \"message\": \"Hello,", "app.model import * def set_cache(key, value): rclient = redis.Redis.from_url(settings.get('REDIS_URL')) rclient.set(key, json.dumps(value)) def get_cache(key):", "rclient.set(key, json.dumps(value)) def get_cache(key): rclient = redis.Redis.from_url(settings.get('REDIS_URL')) return rclient.get(key) class RootResources: def on_get(self,", "from app.model import * def set_cache(key, value): rclient = redis.Redis.from_url(settings.get('REDIS_URL')) rclient.set(key, json.dumps(value)) def", "[_country.repr for _country in countries_list] } class RootNameResources: def on_post(self, req, resp, name):", "redis.Redis.from_url(settings.get('REDIS_URL')) return rclient.get(key) class RootResources: def on_get(self, req, resp): if req.params.get('name'): self.sess.add(Countries( country_id=", "json import redis from app.config import settings from app.model import * def set_cache(key,", "app.config import settings from app.model import * def set_cache(key, value): rclient = redis.Redis.from_url(settings.get('REDIS_URL'))", "def set_cache(key, value): rclient = redis.Redis.from_url(settings.get('REDIS_URL')) rclient.set(key, json.dumps(value)) def get_cache(key): rclient = redis.Redis.from_url(settings.get('REDIS_URL'))", "set_cache(key, value): rclient = redis.Redis.from_url(settings.get('REDIS_URL')) rclient.set(key, json.dumps(value)) def get_cache(key): rclient = redis.Redis.from_url(settings.get('REDIS_URL')) return", "value): rclient = redis.Redis.from_url(settings.get('REDIS_URL')) rclient.set(key, json.dumps(value)) def get_cache(key): rclient = redis.Redis.from_url(settings.get('REDIS_URL')) return rclient.get(key)", "class RootResources: def on_get(self, req, resp): if req.params.get('name'): self.sess.add(Countries( country_id= uuid.uuid4(), country_name= req.params.get('name')", "\"Helloorld!\", \"country\": [_country.repr for _country in countries_list] } class RootNameResources: def on_post(self, req,", "def on_get(self, req, resp): if req.params.get('name'): self.sess.add(Countries( country_id= uuid.uuid4(), country_name= req.params.get('name') )) self.sess.commit()", "<gh_stars>0 import os import uuid import json import redis from app.config import settings", "import os import uuid import json import redis from app.config import settings from", "RootNameResources: def on_post(self, req, resp, name): resp.media = { \"message\": \"Hello, {}!\".format(name.capitalize()) }", "redis from app.config import settings from app.model import * def set_cache(key, value): rclient", "req.params.get('name') )) self.sess.commit() countries_list = self.sess.query(Countries).all() resp.media = { \"message\": \"Helloorld!\", \"country\": [_country.repr", ")) self.sess.commit() countries_list = self.sess.query(Countries).all() resp.media = { \"message\": \"Helloorld!\", \"country\": [_country.repr for", "resp.media = { \"message\": \"Helloorld!\", \"country\": [_country.repr for _country in countries_list] } class", "rclient = redis.Redis.from_url(settings.get('REDIS_URL')) return rclient.get(key) class RootResources: def on_get(self, req, 
resp): if req.params.get('name'):", "rclient.get(key) class RootResources: def on_get(self, req, resp): if req.params.get('name'): self.sess.add(Countries( country_id= uuid.uuid4(), country_name=", "\"country\": [_country.repr for _country in countries_list] } class RootNameResources: def on_post(self, req, resp,", "= redis.Redis.from_url(settings.get('REDIS_URL')) rclient.set(key, json.dumps(value)) def get_cache(key): rclient = redis.Redis.from_url(settings.get('REDIS_URL')) return rclient.get(key) class RootResources:", "if req.params.get('name'): self.sess.add(Countries( country_id= uuid.uuid4(), country_name= req.params.get('name') )) self.sess.commit() countries_list = self.sess.query(Countries).all() resp.media", "countries_list] } class RootNameResources: def on_post(self, req, resp, name): resp.media = { \"message\":", "country_id= uuid.uuid4(), country_name= req.params.get('name') )) self.sess.commit() countries_list = self.sess.query(Countries).all() resp.media = { \"message\":", "self.sess.commit() countries_list = self.sess.query(Countries).all() resp.media = { \"message\": \"Helloorld!\", \"country\": [_country.repr for _country", "resp): if req.params.get('name'): self.sess.add(Countries( country_id= uuid.uuid4(), country_name= req.params.get('name') )) self.sess.commit() countries_list = self.sess.query(Countries).all()", "uuid import json import redis from app.config import settings from app.model import *", "RootResources: def on_get(self, req, resp): if req.params.get('name'): self.sess.add(Countries( country_id= uuid.uuid4(), country_name= req.params.get('name') ))", "\"message\": \"Helloorld!\", \"country\": [_country.repr for _country in countries_list] } class RootNameResources: def on_post(self,", "for _country in countries_list] } class RootNameResources: def on_post(self, req, resp, name): resp.media", "import settings from app.model import * def set_cache(key, value): rclient = redis.Redis.from_url(settings.get('REDIS_URL')) rclient.set(key,", "country_name= req.params.get('name') )) self.sess.commit() countries_list = self.sess.query(Countries).all() resp.media = { \"message\": \"Helloorld!\", \"country\":", "import * def set_cache(key, value): rclient = redis.Redis.from_url(settings.get('REDIS_URL')) rclient.set(key, json.dumps(value)) def get_cache(key): rclient" ]
[ "Define sizes input_size = 3 output_size = 2 hidden_size = 5 # Create", "perceptron fc1 = torch.nn.Linear(input_size, hidden_size) act_fn = torch.nn.Tanh() fc2 = torch.nn.Linear(hidden_size, output_size) #", "https://arxiv.org/abs/1312.6114 import torch # Define sizes input_size = 3 output_size = 2 hidden_size", "y. # See paper: https://arxiv.org/abs/1312.6114 import torch # Define sizes input_size = 3", "a Variational auto-encoder for the # missing y. # See paper: https://arxiv.org/abs/1312.6114 import", "for the # missing y. # See paper: https://arxiv.org/abs/1312.6114 import torch # Define", "hidden_size = 5 # Create multi-layer perceptron fc1 = torch.nn.Linear(input_size, hidden_size) act_fn =", "torch.ones(20, 3) # I want this to be 20 x (5 + 5", "torch.randn(20, 5) b = torch.randn(3) * torch.ones(20, 3) # I want this to", "print(out) # Test dims y = torch.randn(20, 5) m = torch.randn(20, 5) b", "* torch.ones(20, 3) # I want this to be 20 x (5 +", "Main num_obs = 100 x = torch.randn(num_obs, input_size) out = fc1(x) out =", "20 x (5 + 5 + 3) input_vec = torch.cat([y, m, b], dim=-1).shape", "= torch.randn(20, 5) m = torch.randn(20, 5) b = torch.randn(3) * torch.ones(20, 3)", "# See paper: https://arxiv.org/abs/1312.6114 import torch # Define sizes input_size = 3 output_size", "out = act_fn(out) out = fc2(out) print(out) # Test dims y = torch.randn(20,", "dims y = torch.randn(20, 5) m = torch.randn(20, 5) b = torch.randn(3) *", "be 20 x (5 + 5 + 3) input_vec = torch.cat([y, m, b],", "# Test dims y = torch.randn(20, 5) m = torch.randn(20, 5) b =", "fc1(x) out = act_fn(out) out = fc2(out) print(out) # Test dims y =", "torch # Define sizes input_size = 3 output_size = 2 hidden_size = 5", "= 5 # Create multi-layer perceptron fc1 = torch.nn.Linear(input_size, hidden_size) act_fn = torch.nn.Tanh()", "100 x = torch.randn(num_obs, input_size) out = fc1(x) out = act_fn(out) out =", "= torch.randn(num_obs, input_size) out = fc1(x) out = act_fn(out) out = fc2(out) print(out)", "output_size = 2 hidden_size = 5 # Create multi-layer perceptron fc1 = torch.nn.Linear(input_size,", "fc2(out) print(out) # Test dims y = torch.randn(20, 5) m = torch.randn(20, 5)", "b = torch.randn(3) * torch.ones(20, 3) # I want this to be 20", "5) b = torch.randn(3) * torch.ones(20, 3) # I want this to be", "this to be 20 x (5 + 5 + 3) input_vec = torch.cat([y,", "to be 20 x (5 + 5 + 3) input_vec = torch.cat([y, m,", "Create multi-layer perceptron fc1 = torch.nn.Linear(input_size, hidden_size) act_fn = torch.nn.Tanh() fc2 = torch.nn.Linear(hidden_size,", "= fc1(x) out = act_fn(out) out = fc2(out) print(out) # Test dims y", "= torch.nn.Linear(input_size, hidden_size) act_fn = torch.nn.Tanh() fc2 = torch.nn.Linear(hidden_size, output_size) # Main num_obs", "# Define sizes input_size = 3 output_size = 2 hidden_size = 5 #", "multi-layer perceptron fc1 = torch.nn.Linear(input_size, hidden_size) act_fn = torch.nn.Tanh() fc2 = torch.nn.Linear(hidden_size, output_size)", "= act_fn(out) out = fc2(out) print(out) # Test dims y = torch.randn(20, 5)", "= torch.nn.Linear(hidden_size, output_size) # Main num_obs = 100 x = torch.randn(num_obs, input_size) out", "= 3 output_size = 2 hidden_size = 5 # Create multi-layer perceptron fc1", "# I want this to be 20 x (5 + 5 + 3)", "as a Variational auto-encoder for the # missing y. 
# See paper: https://arxiv.org/abs/1312.6114", "num_obs = 100 x = torch.randn(num_obs, input_size) out = fc1(x) out = act_fn(out)", "# Main num_obs = 100 x = torch.randn(num_obs, input_size) out = fc1(x) out", "See paper: https://arxiv.org/abs/1312.6114 import torch # Define sizes input_size = 3 output_size =", "# missing y. # See paper: https://arxiv.org/abs/1312.6114 import torch # Define sizes input_size", "intend to use this as a Variational auto-encoder for the # missing y.", "input_size = 3 output_size = 2 hidden_size = 5 # Create multi-layer perceptron", "x = torch.randn(num_obs, input_size) out = fc1(x) out = act_fn(out) out = fc2(out)", "= 2 hidden_size = 5 # Create multi-layer perceptron fc1 = torch.nn.Linear(input_size, hidden_size)", "# I intend to use this as a Variational auto-encoder for the #", "fc2 = torch.nn.Linear(hidden_size, output_size) # Main num_obs = 100 x = torch.randn(num_obs, input_size)", "torch.randn(num_obs, input_size) out = fc1(x) out = act_fn(out) out = fc2(out) print(out) #", "Test dims y = torch.randn(20, 5) m = torch.randn(20, 5) b = torch.randn(3)", "act_fn(out) out = fc2(out) print(out) # Test dims y = torch.randn(20, 5) m", "hidden_size) act_fn = torch.nn.Tanh() fc2 = torch.nn.Linear(hidden_size, output_size) # Main num_obs = 100", "out = fc2(out) print(out) # Test dims y = torch.randn(20, 5) m =", "# Create multi-layer perceptron fc1 = torch.nn.Linear(input_size, hidden_size) act_fn = torch.nn.Tanh() fc2 =", "= 100 x = torch.randn(num_obs, input_size) out = fc1(x) out = act_fn(out) out", "paper: https://arxiv.org/abs/1312.6114 import torch # Define sizes input_size = 3 output_size = 2", "fc1 = torch.nn.Linear(input_size, hidden_size) act_fn = torch.nn.Tanh() fc2 = torch.nn.Linear(hidden_size, output_size) # Main", "torch.nn.Linear(input_size, hidden_size) act_fn = torch.nn.Tanh() fc2 = torch.nn.Linear(hidden_size, output_size) # Main num_obs =", "torch.randn(20, 5) m = torch.randn(20, 5) b = torch.randn(3) * torch.ones(20, 3) #", "use this as a Variational auto-encoder for the # missing y. # See", "torch.randn(3) * torch.ones(20, 3) # I want this to be 20 x (5", "m = torch.randn(20, 5) b = torch.randn(3) * torch.ones(20, 3) # I want", "the # missing y. # See paper: https://arxiv.org/abs/1312.6114 import torch # Define sizes", "out = fc1(x) out = act_fn(out) out = fc2(out) print(out) # Test dims", "this as a Variational auto-encoder for the # missing y. # See paper:", "sizes input_size = 3 output_size = 2 hidden_size = 5 # Create multi-layer", "5 # Create multi-layer perceptron fc1 = torch.nn.Linear(input_size, hidden_size) act_fn = torch.nn.Tanh() fc2", "= torch.randn(3) * torch.ones(20, 3) # I want this to be 20 x", "I want this to be 20 x (5 + 5 + 3) input_vec", "import torch # Define sizes input_size = 3 output_size = 2 hidden_size =", "2 hidden_size = 5 # Create multi-layer perceptron fc1 = torch.nn.Linear(input_size, hidden_size) act_fn", "3) # I want this to be 20 x (5 + 5 +", "torch.nn.Tanh() fc2 = torch.nn.Linear(hidden_size, output_size) # Main num_obs = 100 x = torch.randn(num_obs,", "3 output_size = 2 hidden_size = 5 # Create multi-layer perceptron fc1 =", "auto-encoder for the # missing y. 
# See paper: https://arxiv.org/abs/1312.6114 import torch #", "want this to be 20 x (5 + 5 + 3) input_vec =", "torch.nn.Linear(hidden_size, output_size) # Main num_obs = 100 x = torch.randn(num_obs, input_size) out =", "output_size) # Main num_obs = 100 x = torch.randn(num_obs, input_size) out = fc1(x)", "to use this as a Variational auto-encoder for the # missing y. #", "act_fn = torch.nn.Tanh() fc2 = torch.nn.Linear(hidden_size, output_size) # Main num_obs = 100 x", "input_size) out = fc1(x) out = act_fn(out) out = fc2(out) print(out) # Test", "missing y. # See paper: https://arxiv.org/abs/1312.6114 import torch # Define sizes input_size =", "Variational auto-encoder for the # missing y. # See paper: https://arxiv.org/abs/1312.6114 import torch", "= torch.nn.Tanh() fc2 = torch.nn.Linear(hidden_size, output_size) # Main num_obs = 100 x =", "y = torch.randn(20, 5) m = torch.randn(20, 5) b = torch.randn(3) * torch.ones(20,", "5) m = torch.randn(20, 5) b = torch.randn(3) * torch.ones(20, 3) # I", "I intend to use this as a Variational auto-encoder for the # missing", "= fc2(out) print(out) # Test dims y = torch.randn(20, 5) m = torch.randn(20,", "= torch.randn(20, 5) b = torch.randn(3) * torch.ones(20, 3) # I want this" ]
[ "# # Copyright (c) 2020-2021 Pinecone Systems Inc. All right reserved. # from", "PineconeException, PineconeProtocolError from .core.client.exceptions import OpenApiException, ApiAttributeError, ApiTypeError, ApiValueError, \\ ApiKeyError, ApiException, NotFoundException,", "OpenApiException, ApiAttributeError, ApiTypeError, ApiValueError, \\ ApiKeyError, ApiException, NotFoundException, UnauthorizedException, ForbiddenException, ServiceException __all__ =", "Pinecone Systems Inc. All right reserved. # from .core.exceptions import PineconeException, PineconeProtocolError from", "\\ ApiKeyError, ApiException, NotFoundException, UnauthorizedException, ForbiddenException, ServiceException __all__ = [ \"PineconeException\", \"PineconeProtocolError\", \"OpenApiException\",", "Inc. All right reserved. # from .core.exceptions import PineconeException, PineconeProtocolError from .core.client.exceptions import", "ForbiddenException, ServiceException __all__ = [ \"PineconeException\", \"PineconeProtocolError\", \"OpenApiException\", \"ApiAttributeError\", \"ApiTypeError\", \"ApiValueError\", \"ApiKeyError\", \"ApiException\",", "ApiKeyError, ApiException, NotFoundException, UnauthorizedException, ForbiddenException, ServiceException __all__ = [ \"PineconeException\", \"PineconeProtocolError\", \"OpenApiException\", \"ApiAttributeError\",", "ApiValueError, \\ ApiKeyError, ApiException, NotFoundException, UnauthorizedException, ForbiddenException, ServiceException __all__ = [ \"PineconeException\", \"PineconeProtocolError\",", ".core.exceptions import PineconeException, PineconeProtocolError from .core.client.exceptions import OpenApiException, ApiAttributeError, ApiTypeError, ApiValueError, \\ ApiKeyError,", "All right reserved. # from .core.exceptions import PineconeException, PineconeProtocolError from .core.client.exceptions import OpenApiException,", "ApiException, NotFoundException, UnauthorizedException, ForbiddenException, ServiceException __all__ = [ \"PineconeException\", \"PineconeProtocolError\", \"OpenApiException\", \"ApiAttributeError\", \"ApiTypeError\",", "reserved. # from .core.exceptions import PineconeException, PineconeProtocolError from .core.client.exceptions import OpenApiException, ApiAttributeError, ApiTypeError,", "from .core.exceptions import PineconeException, PineconeProtocolError from .core.client.exceptions import OpenApiException, ApiAttributeError, ApiTypeError, ApiValueError, \\", "PineconeProtocolError from .core.client.exceptions import OpenApiException, ApiAttributeError, ApiTypeError, ApiValueError, \\ ApiKeyError, ApiException, NotFoundException, UnauthorizedException,", "import OpenApiException, ApiAttributeError, ApiTypeError, ApiValueError, \\ ApiKeyError, ApiException, NotFoundException, UnauthorizedException, ForbiddenException, ServiceException __all__", "# Copyright (c) 2020-2021 Pinecone Systems Inc. All right reserved. 
# from .core.exceptions", "from .core.client.exceptions import OpenApiException, ApiAttributeError, ApiTypeError, ApiValueError, \\ ApiKeyError, ApiException, NotFoundException, UnauthorizedException, ForbiddenException,", "__all__ = [ \"PineconeException\", \"PineconeProtocolError\", \"OpenApiException\", \"ApiAttributeError\", \"ApiTypeError\", \"ApiValueError\", \"ApiKeyError\", \"ApiException\", \"NotFoundException\", \"UnauthorizedException\",", "# from .core.exceptions import PineconeException, PineconeProtocolError from .core.client.exceptions import OpenApiException, ApiAttributeError, ApiTypeError, ApiValueError,", "Copyright (c) 2020-2021 Pinecone Systems Inc. All right reserved. # from .core.exceptions import", "= [ \"PineconeException\", \"PineconeProtocolError\", \"OpenApiException\", \"ApiAttributeError\", \"ApiTypeError\", \"ApiValueError\", \"ApiKeyError\", \"ApiException\", \"NotFoundException\", \"UnauthorizedException\", \"ForbiddenException\",", "(c) 2020-2021 Pinecone Systems Inc. All right reserved. # from .core.exceptions import PineconeException,", "ApiAttributeError, ApiTypeError, ApiValueError, \\ ApiKeyError, ApiException, NotFoundException, UnauthorizedException, ForbiddenException, ServiceException __all__ = [", "\"PineconeException\", \"PineconeProtocolError\", \"OpenApiException\", \"ApiAttributeError\", \"ApiTypeError\", \"ApiValueError\", \"ApiKeyError\", \"ApiException\", \"NotFoundException\", \"UnauthorizedException\", \"ForbiddenException\", \"ServiceException\", ]", ".core.client.exceptions import OpenApiException, ApiAttributeError, ApiTypeError, ApiValueError, \\ ApiKeyError, ApiException, NotFoundException, UnauthorizedException, ForbiddenException, ServiceException", "right reserved. # from .core.exceptions import PineconeException, PineconeProtocolError from .core.client.exceptions import OpenApiException, ApiAttributeError,", "ApiTypeError, ApiValueError, \\ ApiKeyError, ApiException, NotFoundException, UnauthorizedException, ForbiddenException, ServiceException __all__ = [ \"PineconeException\",", "ServiceException __all__ = [ \"PineconeException\", \"PineconeProtocolError\", \"OpenApiException\", \"ApiAttributeError\", \"ApiTypeError\", \"ApiValueError\", \"ApiKeyError\", \"ApiException\", \"NotFoundException\",", "Systems Inc. All right reserved. # from .core.exceptions import PineconeException, PineconeProtocolError from .core.client.exceptions", "import PineconeException, PineconeProtocolError from .core.client.exceptions import OpenApiException, ApiAttributeError, ApiTypeError, ApiValueError, \\ ApiKeyError, ApiException,", "2020-2021 Pinecone Systems Inc. All right reserved. # from .core.exceptions import PineconeException, PineconeProtocolError", "NotFoundException, UnauthorizedException, ForbiddenException, ServiceException __all__ = [ \"PineconeException\", \"PineconeProtocolError\", \"OpenApiException\", \"ApiAttributeError\", \"ApiTypeError\", \"ApiValueError\",", "[ \"PineconeException\", \"PineconeProtocolError\", \"OpenApiException\", \"ApiAttributeError\", \"ApiTypeError\", \"ApiValueError\", \"ApiKeyError\", \"ApiException\", \"NotFoundException\", \"UnauthorizedException\", \"ForbiddenException\", \"ServiceException\",", "UnauthorizedException, ForbiddenException, ServiceException __all__ = [ \"PineconeException\", \"PineconeProtocolError\", \"OpenApiException\", \"ApiAttributeError\", \"ApiTypeError\", \"ApiValueError\", \"ApiKeyError\"," ]
[ "<gh_stars>0 from pydantic import BaseModel class UserBase(BaseModel): email: str class BaseComplaint(BaseModel): title :", "BaseModel class UserBase(BaseModel): email: str class BaseComplaint(BaseModel): title : str description : str", "UserBase(BaseModel): email: str class BaseComplaint(BaseModel): title : str description : str photo_url :", "str class BaseComplaint(BaseModel): title : str description : str photo_url : str amount:", "from pydantic import BaseModel class UserBase(BaseModel): email: str class BaseComplaint(BaseModel): title : str", "class UserBase(BaseModel): email: str class BaseComplaint(BaseModel): title : str description : str photo_url", "import BaseModel class UserBase(BaseModel): email: str class BaseComplaint(BaseModel): title : str description :", "email: str class BaseComplaint(BaseModel): title : str description : str photo_url : str", "class BaseComplaint(BaseModel): title : str description : str photo_url : str amount: float", "pydantic import BaseModel class UserBase(BaseModel): email: str class BaseComplaint(BaseModel): title : str description" ]
[ "priv_key_file: app.config['RSA_CIPHER'] = serialization.load_pem_private_key( priv_key_file.read(), password=<PASSWORD>, backend=default_backend()) # User loader @login_manager.user_loader def user_loader(user_id:", "app = Flask(__name__) # Authentication if 'FLASK_SECRET_KEY' not in environ: raise RuntimeError('FLASK_SECRET_KEY environment", "environ['FLASK_SECRET_KEY'] login_manager = LoginManager() login_manager.init_app(app) # Firebase: Database URL is required if 'FIREBASE_DATABASE_URL'", "models.profile import Profile from os import environ from views import init_views import firebase_admin", "# Environment variables load_dotenv() # Flask app app = Flask(__name__) # Authentication if", "'?next=' + request.path) # Routes init_views(app) init_api(app) # Run app if __name__ ==", "default_backend from cryptography.hazmat.primitives import serialization from dotenv import load_dotenv from firebase_admin import credentials,", "environ: raise RuntimeError('FLASK_SECRET_KEY environment variable not set') app.secret_key = environ['FLASK_SECRET_KEY'] login_manager = LoginManager()", "get database root ref firebase_admin.initialize_app(cred, { 'databaseURL': environ['FIREBASE_DATABASE_URL'] }) app.config['DB'] = Database(db.reference()) #", "controllers import init_api from controllers.database import Database from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives", "environment variable not set') app.secret_key = environ['FLASK_SECRET_KEY'] login_manager = LoginManager() login_manager.init_app(app) # Firebase:", "{ 'databaseURL': environ['FIREBASE_DATABASE_URL'] }) app.config['DB'] = Database(db.reference()) # RSA cipher for decrypting credentials", "firebase_admin import credentials, db from flask import Flask, redirect, request, url_for from flask_login", "for decrypting credentials with open('priv.pem', 'rb') as priv_key_file: app.config['RSA_CIPHER'] = serialization.load_pem_private_key( priv_key_file.read(), password=<PASSWORD>,", "Environment variables load_dotenv() # Flask app app = Flask(__name__) # Authentication if 'FLASK_SECRET_KEY'", "cipher for decrypting credentials with open('priv.pem', 'rb') as priv_key_file: app.config['RSA_CIPHER'] = serialization.load_pem_private_key( priv_key_file.read(),", "redirect, request, url_for from flask_login import LoginManager from models.profile import Profile from os", "Parse credentials.json (not in tree, must be supplied) cred = credentials.Certificate('credentials.json') # Firebase:", "= environ['FLASK_SECRET_KEY'] login_manager = LoginManager() login_manager.init_app(app) # Firebase: Database URL is required if", "Flask(__name__) # Authentication if 'FLASK_SECRET_KEY' not in environ: raise RuntimeError('FLASK_SECRET_KEY environment variable not", "ref firebase_admin.initialize_app(cred, { 'databaseURL': environ['FIREBASE_DATABASE_URL'] }) app.config['DB'] = Database(db.reference()) # RSA cipher for", "Profile: return app.config['DB'].lookup_user_by_id(user_id) # Unauthorized error handler @login_manager.unauthorized_handler def unauthorized_handler(): return redirect(url_for('login.login_page') +", "RuntimeError('FLASK_SECRET_KEY environment variable not set') app.secret_key = environ['FLASK_SECRET_KEY'] login_manager = LoginManager() login_manager.init_app(app) #", "cryptography.hazmat.primitives import serialization from dotenv import load_dotenv from firebase_admin import credentials, db from", "environ from views import init_views import firebase_admin # Environment variables load_dotenv() # 
Flask", "must be supplied) cred = credentials.Certificate('credentials.json') # Firebase: Initialize and get database root", "root ref firebase_admin.initialize_app(cred, { 'databaseURL': environ['FIREBASE_DATABASE_URL'] }) app.config['DB'] = Database(db.reference()) # RSA cipher", "'databaseURL': environ['FIREBASE_DATABASE_URL'] }) app.config['DB'] = Database(db.reference()) # RSA cipher for decrypting credentials with", "Database URL is required if 'FIREBASE_DATABASE_URL' not in environ: raise RuntimeError('FIREBASE_DATABASE_URL environment variable", "import credentials, db from flask import Flask, redirect, request, url_for from flask_login import", "raise RuntimeError('FLASK_SECRET_KEY environment variable not set') app.secret_key = environ['FLASK_SECRET_KEY'] login_manager = LoginManager() login_manager.init_app(app)", "init_views import firebase_admin # Environment variables load_dotenv() # Flask app app = Flask(__name__)", "= Flask(__name__) # Authentication if 'FLASK_SECRET_KEY' not in environ: raise RuntimeError('FLASK_SECRET_KEY environment variable", "for GoVLê from controllers import init_api from controllers.database import Database from cryptography.hazmat.backends import", "in environ: raise RuntimeError('FIREBASE_DATABASE_URL environment variable not set') # Firebase: Parse credentials.json (not", "import load_dotenv from firebase_admin import credentials, db from flask import Flask, redirect, request,", "file for GoVLê from controllers import init_api from controllers.database import Database from cryptography.hazmat.backends", "is required if 'FIREBASE_DATABASE_URL' not in environ: raise RuntimeError('FIREBASE_DATABASE_URL environment variable not set')", "import environ from views import init_views import firebase_admin # Environment variables load_dotenv() #", "variables load_dotenv() # Flask app app = Flask(__name__) # Authentication if 'FLASK_SECRET_KEY' not", "# Firebase: Parse credentials.json (not in tree, must be supplied) cred = credentials.Certificate('credentials.json')", "LoginManager() login_manager.init_app(app) # Firebase: Database URL is required if 'FIREBASE_DATABASE_URL' not in environ:", "from flask import Flask, redirect, request, url_for from flask_login import LoginManager from models.profile", "credentials.json (not in tree, must be supplied) cred = credentials.Certificate('credentials.json') # Firebase: Initialize", "+ request.path) # Routes init_views(app) init_api(app) # Run app if __name__ == '__main__':", "from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization from dotenv import load_dotenv from", "decrypting credentials with open('priv.pem', 'rb') as priv_key_file: app.config['RSA_CIPHER'] = serialization.load_pem_private_key( priv_key_file.read(), password=<PASSWORD>, backend=default_backend())", "open('priv.pem', 'rb') as priv_key_file: app.config['RSA_CIPHER'] = serialization.load_pem_private_key( priv_key_file.read(), password=<PASSWORD>, backend=default_backend()) # User loader", "error handler @login_manager.unauthorized_handler def unauthorized_handler(): return redirect(url_for('login.login_page') + '?next=' + request.path) # Routes", "Firebase: Database URL is required if 'FIREBASE_DATABASE_URL' not in environ: raise RuntimeError('FIREBASE_DATABASE_URL environment", "database root ref firebase_admin.initialize_app(cred, { 'databaseURL': environ['FIREBASE_DATABASE_URL'] }) app.config['DB'] = Database(db.reference()) # RSA", "# Firebase: Initialize and get database root ref 
firebase_admin.initialize_app(cred, { 'databaseURL': environ['FIREBASE_DATABASE_URL'] })", "Firebase: Initialize and get database root ref firebase_admin.initialize_app(cred, { 'databaseURL': environ['FIREBASE_DATABASE_URL'] }) app.config['DB']", "from flask_login import LoginManager from models.profile import Profile from os import environ from", "flask_login import LoginManager from models.profile import Profile from os import environ from views", "from os import environ from views import init_views import firebase_admin # Environment variables", "supplied) cred = credentials.Certificate('credentials.json') # Firebase: Initialize and get database root ref firebase_admin.initialize_app(cred,", "'FLASK_SECRET_KEY' not in environ: raise RuntimeError('FLASK_SECRET_KEY environment variable not set') app.secret_key = environ['FLASK_SECRET_KEY']", "app.config['DB'].lookup_user_by_id(user_id) # Unauthorized error handler @login_manager.unauthorized_handler def unauthorized_handler(): return redirect(url_for('login.login_page') + '?next=' +", "import firebase_admin # Environment variables load_dotenv() # Flask app app = Flask(__name__) #", "controllers.database import Database from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization from dotenv", "environ: raise RuntimeError('FIREBASE_DATABASE_URL environment variable not set') # Firebase: Parse credentials.json (not in", "Unauthorized error handler @login_manager.unauthorized_handler def unauthorized_handler(): return redirect(url_for('login.login_page') + '?next=' + request.path) #", "firebase_admin.initialize_app(cred, { 'databaseURL': environ['FIREBASE_DATABASE_URL'] }) app.config['DB'] = Database(db.reference()) # RSA cipher for decrypting", "Main application file for GoVLê from controllers import init_api from controllers.database import Database", "# User loader @login_manager.user_loader def user_loader(user_id: str) -> Profile: return app.config['DB'].lookup_user_by_id(user_id) # Unauthorized", "Firebase: Parse credentials.json (not in tree, must be supplied) cred = credentials.Certificate('credentials.json') #", "import serialization from dotenv import load_dotenv from firebase_admin import credentials, db from flask", "Authentication if 'FLASK_SECRET_KEY' not in environ: raise RuntimeError('FLASK_SECRET_KEY environment variable not set') app.secret_key", "Profile from os import environ from views import init_views import firebase_admin # Environment", "set') app.secret_key = environ['FLASK_SECRET_KEY'] login_manager = LoginManager() login_manager.init_app(app) # Firebase: Database URL is", "# Unauthorized error handler @login_manager.unauthorized_handler def unauthorized_handler(): return redirect(url_for('login.login_page') + '?next=' + request.path)", "credentials.Certificate('credentials.json') # Firebase: Initialize and get database root ref firebase_admin.initialize_app(cred, { 'databaseURL': environ['FIREBASE_DATABASE_URL']", "credentials, db from flask import Flask, redirect, request, url_for from flask_login import LoginManager", "from dotenv import load_dotenv from firebase_admin import credentials, db from flask import Flask,", "not in environ: raise RuntimeError('FIREBASE_DATABASE_URL environment variable not set') # Firebase: Parse credentials.json", "def unauthorized_handler(): return redirect(url_for('login.login_page') + '?next=' + request.path) # Routes init_views(app) init_api(app) #", "import init_api from controllers.database import Database from 
cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import", "with open('priv.pem', 'rb') as priv_key_file: app.config['RSA_CIPHER'] = serialization.load_pem_private_key( priv_key_file.read(), password=<PASSWORD>, backend=default_backend()) # User", "variable not set') app.secret_key = environ['FLASK_SECRET_KEY'] login_manager = LoginManager() login_manager.init_app(app) # Firebase: Database", "Flask app app = Flask(__name__) # Authentication if 'FLASK_SECRET_KEY' not in environ: raise", "in environ: raise RuntimeError('FLASK_SECRET_KEY environment variable not set') app.secret_key = environ['FLASK_SECRET_KEY'] login_manager =", "return redirect(url_for('login.login_page') + '?next=' + request.path) # Routes init_views(app) init_api(app) # Run app", "url_for from flask_login import LoginManager from models.profile import Profile from os import environ", "<filename>app.py # Main application file for GoVLê from controllers import init_api from controllers.database", "= LoginManager() login_manager.init_app(app) # Firebase: Database URL is required if 'FIREBASE_DATABASE_URL' not in", "redirect(url_for('login.login_page') + '?next=' + request.path) # Routes init_views(app) init_api(app) # Run app if", "loader @login_manager.user_loader def user_loader(user_id: str) -> Profile: return app.config['DB'].lookup_user_by_id(user_id) # Unauthorized error handler", "views import init_views import firebase_admin # Environment variables load_dotenv() # Flask app app", "not set') # Firebase: Parse credentials.json (not in tree, must be supplied) cred", "# Firebase: Database URL is required if 'FIREBASE_DATABASE_URL' not in environ: raise RuntimeError('FIREBASE_DATABASE_URL", "serialization.load_pem_private_key( priv_key_file.read(), password=<PASSWORD>, backend=default_backend()) # User loader @login_manager.user_loader def user_loader(user_id: str) -> Profile:", "}) app.config['DB'] = Database(db.reference()) # RSA cipher for decrypting credentials with open('priv.pem', 'rb')", "from views import init_views import firebase_admin # Environment variables load_dotenv() # Flask app", "= credentials.Certificate('credentials.json') # Firebase: Initialize and get database root ref firebase_admin.initialize_app(cred, { 'databaseURL':", "'FIREBASE_DATABASE_URL' not in environ: raise RuntimeError('FIREBASE_DATABASE_URL environment variable not set') # Firebase: Parse", "and get database root ref firebase_admin.initialize_app(cred, { 'databaseURL': environ['FIREBASE_DATABASE_URL'] }) app.config['DB'] = Database(db.reference())", "Initialize and get database root ref firebase_admin.initialize_app(cred, { 'databaseURL': environ['FIREBASE_DATABASE_URL'] }) app.config['DB'] =", "Flask, redirect, request, url_for from flask_login import LoginManager from models.profile import Profile from", "request, url_for from flask_login import LoginManager from models.profile import Profile from os import", "= serialization.load_pem_private_key( priv_key_file.read(), password=<PASSWORD>, backend=default_backend()) # User loader @login_manager.user_loader def user_loader(user_id: str) ->", "os import environ from views import init_views import firebase_admin # Environment variables load_dotenv()", "-> Profile: return app.config['DB'].lookup_user_by_id(user_id) # Unauthorized error handler @login_manager.unauthorized_handler def unauthorized_handler(): return redirect(url_for('login.login_page')", "application file for GoVLê from controllers import init_api from controllers.database import 
Database from", "variable not set') # Firebase: Parse credentials.json (not in tree, must be supplied)", "'rb') as priv_key_file: app.config['RSA_CIPHER'] = serialization.load_pem_private_key( priv_key_file.read(), password=<PASSWORD>, backend=default_backend()) # User loader @login_manager.user_loader", "import default_backend from cryptography.hazmat.primitives import serialization from dotenv import load_dotenv from firebase_admin import", "as priv_key_file: app.config['RSA_CIPHER'] = serialization.load_pem_private_key( priv_key_file.read(), password=<PASSWORD>, backend=default_backend()) # User loader @login_manager.user_loader def", "import Flask, redirect, request, url_for from flask_login import LoginManager from models.profile import Profile", "if 'FLASK_SECRET_KEY' not in environ: raise RuntimeError('FLASK_SECRET_KEY environment variable not set') app.secret_key =", "if 'FIREBASE_DATABASE_URL' not in environ: raise RuntimeError('FIREBASE_DATABASE_URL environment variable not set') # Firebase:", "be supplied) cred = credentials.Certificate('credentials.json') # Firebase: Initialize and get database root ref", "@login_manager.unauthorized_handler def unauthorized_handler(): return redirect(url_for('login.login_page') + '?next=' + request.path) # Routes init_views(app) init_api(app)", "(not in tree, must be supplied) cred = credentials.Certificate('credentials.json') # Firebase: Initialize and", "str) -> Profile: return app.config['DB'].lookup_user_by_id(user_id) # Unauthorized error handler @login_manager.unauthorized_handler def unauthorized_handler(): return", "app.secret_key = environ['FLASK_SECRET_KEY'] login_manager = LoginManager() login_manager.init_app(app) # Firebase: Database URL is required", "password=<PASSWORD>, backend=default_backend()) # User loader @login_manager.user_loader def user_loader(user_id: str) -> Profile: return app.config['DB'].lookup_user_by_id(user_id)", "app.config['RSA_CIPHER'] = serialization.load_pem_private_key( priv_key_file.read(), password=<PASSWORD>, backend=default_backend()) # User loader @login_manager.user_loader def user_loader(user_id: str)", "import init_views import firebase_admin # Environment variables load_dotenv() # Flask app app =", "dotenv import load_dotenv from firebase_admin import credentials, db from flask import Flask, redirect,", "user_loader(user_id: str) -> Profile: return app.config['DB'].lookup_user_by_id(user_id) # Unauthorized error handler @login_manager.unauthorized_handler def unauthorized_handler():", "not in environ: raise RuntimeError('FLASK_SECRET_KEY environment variable not set') app.secret_key = environ['FLASK_SECRET_KEY'] login_manager", "from cryptography.hazmat.primitives import serialization from dotenv import load_dotenv from firebase_admin import credentials, db", "@login_manager.user_loader def user_loader(user_id: str) -> Profile: return app.config['DB'].lookup_user_by_id(user_id) # Unauthorized error handler @login_manager.unauthorized_handler", "from controllers import init_api from controllers.database import Database from cryptography.hazmat.backends import default_backend from", "not set') app.secret_key = environ['FLASK_SECRET_KEY'] login_manager = LoginManager() login_manager.init_app(app) # Firebase: Database URL", "URL is required if 'FIREBASE_DATABASE_URL' not in environ: raise RuntimeError('FIREBASE_DATABASE_URL environment variable not", "load_dotenv from firebase_admin import credentials, db from flask import Flask, redirect, request, url_for", "# Authentication if 'FLASK_SECRET_KEY' 
not in environ: raise RuntimeError('FLASK_SECRET_KEY environment variable not set')", "from firebase_admin import credentials, db from flask import Flask, redirect, request, url_for from", "app app = Flask(__name__) # Authentication if 'FLASK_SECRET_KEY' not in environ: raise RuntimeError('FLASK_SECRET_KEY", "User loader @login_manager.user_loader def user_loader(user_id: str) -> Profile: return app.config['DB'].lookup_user_by_id(user_id) # Unauthorized error", "import Profile from os import environ from views import init_views import firebase_admin #", "Database(db.reference()) # RSA cipher for decrypting credentials with open('priv.pem', 'rb') as priv_key_file: app.config['RSA_CIPHER']", "login_manager.init_app(app) # Firebase: Database URL is required if 'FIREBASE_DATABASE_URL' not in environ: raise", "request.path) # Routes init_views(app) init_api(app) # Run app if __name__ == '__main__': app.run(debug=True)", "priv_key_file.read(), password=<PASSWORD>, backend=default_backend()) # User loader @login_manager.user_loader def user_loader(user_id: str) -> Profile: return", "backend=default_backend()) # User loader @login_manager.user_loader def user_loader(user_id: str) -> Profile: return app.config['DB'].lookup_user_by_id(user_id) #", "RuntimeError('FIREBASE_DATABASE_URL environment variable not set') # Firebase: Parse credentials.json (not in tree, must", "GoVLê from controllers import init_api from controllers.database import Database from cryptography.hazmat.backends import default_backend", "required if 'FIREBASE_DATABASE_URL' not in environ: raise RuntimeError('FIREBASE_DATABASE_URL environment variable not set') #", "serialization from dotenv import load_dotenv from firebase_admin import credentials, db from flask import", "in tree, must be supplied) cred = credentials.Certificate('credentials.json') # Firebase: Initialize and get", "environ['FIREBASE_DATABASE_URL'] }) app.config['DB'] = Database(db.reference()) # RSA cipher for decrypting credentials with open('priv.pem',", "LoginManager from models.profile import Profile from os import environ from views import init_views", "load_dotenv() # Flask app app = Flask(__name__) # Authentication if 'FLASK_SECRET_KEY' not in", "+ '?next=' + request.path) # Routes init_views(app) init_api(app) # Run app if __name__", "cred = credentials.Certificate('credentials.json') # Firebase: Initialize and get database root ref firebase_admin.initialize_app(cred, {", "= Database(db.reference()) # RSA cipher for decrypting credentials with open('priv.pem', 'rb') as priv_key_file:", "Database from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization from dotenv import load_dotenv", "# Flask app app = Flask(__name__) # Authentication if 'FLASK_SECRET_KEY' not in environ:", "# RSA cipher for decrypting credentials with open('priv.pem', 'rb') as priv_key_file: app.config['RSA_CIPHER'] =", "from models.profile import Profile from os import environ from views import init_views import", "from controllers.database import Database from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization from", "db from flask import Flask, redirect, request, url_for from flask_login import LoginManager from", "login_manager = LoginManager() login_manager.init_app(app) # Firebase: Database URL is required if 'FIREBASE_DATABASE_URL' not", "credentials with open('priv.pem', 'rb') as priv_key_file: app.config['RSA_CIPHER'] = serialization.load_pem_private_key( 
priv_key_file.read(), password=<PASSWORD>, backend=default_backend()) #", "app.config['DB'] = Database(db.reference()) # RSA cipher for decrypting credentials with open('priv.pem', 'rb') as", "import Database from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization from dotenv import", "handler @login_manager.unauthorized_handler def unauthorized_handler(): return redirect(url_for('login.login_page') + '?next=' + request.path) # Routes init_views(app)", "raise RuntimeError('FIREBASE_DATABASE_URL environment variable not set') # Firebase: Parse credentials.json (not in tree,", "RSA cipher for decrypting credentials with open('priv.pem', 'rb') as priv_key_file: app.config['RSA_CIPHER'] = serialization.load_pem_private_key(", "# Main application file for GoVLê from controllers import init_api from controllers.database import", "flask import Flask, redirect, request, url_for from flask_login import LoginManager from models.profile import", "import LoginManager from models.profile import Profile from os import environ from views import", "def user_loader(user_id: str) -> Profile: return app.config['DB'].lookup_user_by_id(user_id) # Unauthorized error handler @login_manager.unauthorized_handler def", "unauthorized_handler(): return redirect(url_for('login.login_page') + '?next=' + request.path) # Routes init_views(app) init_api(app) # Run", "return app.config['DB'].lookup_user_by_id(user_id) # Unauthorized error handler @login_manager.unauthorized_handler def unauthorized_handler(): return redirect(url_for('login.login_page') + '?next='", "cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization from dotenv import load_dotenv from firebase_admin", "environment variable not set') # Firebase: Parse credentials.json (not in tree, must be", "tree, must be supplied) cred = credentials.Certificate('credentials.json') # Firebase: Initialize and get database", "set') # Firebase: Parse credentials.json (not in tree, must be supplied) cred =", "init_api from controllers.database import Database from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization", "firebase_admin # Environment variables load_dotenv() # Flask app app = Flask(__name__) # Authentication" ]
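The private key loaded above is only stored in app.config['RSA_CIPHER']; the decryption itself happens in the views or controllers registered by init_views/init_api, which are not shown here. Below is a minimal sketch of how a handler could use it, assuming the credentials arrive base64-encoded and were encrypted with OAEP/SHA-256 against the matching public key; the helper name decrypt_credential and the padding parameters are assumptions, not taken from the source.

# Hypothetical helper (not in the source tree): decrypt an RSA-encrypted credential
# using the private key stored in app.config['RSA_CIPHER'].
# Assumes the client encrypted with the matching public key using OAEP/SHA-256
# and sent the ciphertext base64-encoded.
from base64 import b64decode

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding
from flask import current_app


def decrypt_credential(ciphertext_b64: str) -> str:
    private_key = current_app.config['RSA_CIPHER']
    plaintext = private_key.decrypt(
        b64decode(ciphertext_b64),
        padding.OAEP(
            mgf=padding.MGF1(algorithm=hashes.SHA256()),
            algorithm=hashes.SHA256(),
            label=None,
        ),
    )
    return plaintext.decode('utf-8')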
[ "turtlesim.msg import Pose from agv_motion.msg import MoveTurtlesimAction from agv_motion.msg import MoveTurtlesimFeedback from agv_motion.msg", "- invalid goal parameters\") self._as.set_aborted(result) def pose_callback(self, pose_message): self._x = pose_message.x self._y =", "(self.XMIN <= goal_x <= self.XMAX) or not (self.YMIN <= goal_y <= self.YMAX): message", "= False out_of_boundaries = False message = \"\" if abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) <", "agv_motion.msg import MoveTurtlesimAction from agv_motion.msg import MoveTurtlesimFeedback from agv_motion.msg import MoveTurtlesimResult class TurtlesimMotionServer:", "abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) < self.TOLERANCE: message = \"Current position is already at the", "\" received\") goal_x = goal.x goal_y = goal.y success = False preempted =", "with robot at ({}, {} | {})\".format(self._x, self._y, self._yaw)) def send_feedback(self): feedback =", "self._yaw = None self._received_position = False self._trajectory_length = 0 self._turtlesim_pub = rospy.Publisher(\"/turtle1/cmd_vel\", Twist,", "started with robot at ({}, {} | {})\".format(self._x, self._y, self._yaw)) def send_feedback(self): feedback", "send_feedback(self): feedback = MoveTurtlesimFeedback() feedback.trajectory_length = self._trajectory_length self._as.publish_feedback(feedback) def on_goal(self, goal): rospy.loginfo(\"Goal \"", "= True rate = rospy.Rate(10.0) self._trajectory_length = 0.0 velocity_message = Twist() while not", "else: message = \"Preempted and stopped execution\" preempted = True break if not", "position...\") while True: if self._received_position: rospy.loginfo(\"received position\") break rospy.loginfo(\"Server has been started with", "= None self._y = None self._yaw = None self._received_position = False self._trajectory_length =", "feedback.trajectory_length = self._trajectory_length self._as.publish_feedback(feedback) def on_goal(self, goal): rospy.loginfo(\"Goal \" + str(goal) + \"", "< self.TOLERANCE: message = \"Success - reached the goal\" success = True break", "# parameters self.TOLERANCE = 0.01 self.XMIN = 1.0 self.XMAX = 10.0 self.YMIN =", "desired_angle_goal = math.atan2(goal_y-self._y, goal_x-self._x) if diff < self.TOLERANCE: message = \"Success - reached", "invalid_parameters = True rate = rospy.Rate(10.0) self._trajectory_length = 0.0 velocity_message = Twist() while", "goal parameters\") self._as.set_aborted(result) def pose_callback(self, pose_message): self._x = pose_message.x self._y = pose_message.y self._yaw", "self.XMIN = 1.0 self.XMAX = 10.0 self.YMIN = 1.0 self.YMAX = 10.0 self.K_LINEAR", "robot at ({}, {} | {})\".format(self._x, self._y, self._yaw)) def send_feedback(self): feedback = MoveTurtlesimFeedback()", "or not (self.YMIN <= goal_y <= self.YMAX): message = \"Invalid goal position\" invalid_parameters", "= 10.0 self.K_LINEAR = 1.0 self.K_ANGULAR = 3.0 self._x = None self._y =", "self.YMAX = 10.0 self.K_LINEAR = 1.0 self.K_ANGULAR = 3.0 self._x = None self._y", "= False invalid_parameters = False out_of_boundaries = False message = \"\" if abs(math.sqrt((goal_x-self._x)**2", "\"Success - reached the goal\" success = True break else: self._trajectory_length += diff", "0 self._turtlesim_pub.publish(velocity_message) # send result result = MoveTurtlesimResult() result.message = message rospy.loginfo(\"Send goal", "self._turtlesim_sub = rospy.Subscriber(\"/turtle1/pose\", Pose, self.pose_callback) rospy.logwarn(\"waiting position...\") while True: if 
self._received_position: rospy.loginfo(\"received position\")", "TurtlesimMotionServer: def __init__(self): self._as = actionlib.SimpleActionServer(\"/turtlesim_action\", MoveTurtlesimAction, execute_cb=self.on_goal, auto_start=False) self._as.start() # parameters self.TOLERANCE", "True break if not (self.XMIN <= self._x <= self.XMAX) or not (self.YMIN <=", "= 3.0 self._x = None self._y = None self._yaw = None self._received_position =", "message = \"\" if abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) < self.TOLERANCE: message = \"Current position", "and not success and not invalid_parameters and not out_of_boundaries: if self._as.is_preempt_requested(): if abs(math.sqrt((goal_x-self._x)**2", "if preempted: rospy.loginfo(\"Preempted\") self._as.set_preempted(result) elif success: rospy.loginfo(\"Success\") self._as.set_succeeded(result) elif out_of_boundaries: rospy.loginfo(\"Aborted - out", "agv_motion.msg import MoveTurtlesimResult class TurtlesimMotionServer: def __init__(self): self._as = actionlib.SimpleActionServer(\"/turtlesim_action\", MoveTurtlesimAction, execute_cb=self.on_goal, auto_start=False)", "= goal.x goal_y = goal.y success = False preempted = False invalid_parameters =", "None self._yaw = None self._received_position = False self._trajectory_length = 0 self._turtlesim_pub = rospy.Publisher(\"/turtle1/cmd_vel\",", "= 1.0 self.YMAX = 10.0 self.K_LINEAR = 1.0 self.K_ANGULAR = 3.0 self._x =", "= rospy.Subscriber(\"/turtle1/pose\", Pose, self.pose_callback) rospy.logwarn(\"waiting position...\") while True: if self._received_position: rospy.loginfo(\"received position\") break", "agv_motion.msg import MoveTurtlesimFeedback from agv_motion.msg import MoveTurtlesimResult class TurtlesimMotionServer: def __init__(self): self._as =", "+ \" received\") goal_x = goal.x goal_y = goal.y success = False preempted", "3.0 self._x = None self._y = None self._yaw = None self._received_position = False", "already at goal position\" success = True break else: message = \"Preempted and", "parameters self.TOLERANCE = 0.01 self.XMIN = 1.0 self.XMAX = 10.0 self.YMIN = 1.0", "= MoveTurtlesimResult() result.message = message rospy.loginfo(\"Send goal result to client\") if preempted: rospy.loginfo(\"Preempted\")", "not success and not invalid_parameters and not out_of_boundaries: if self._as.is_preempt_requested(): if abs(math.sqrt((goal_x-self._x)**2 +", "rospy.loginfo(\"Preempted\") self._as.set_preempted(result) elif success: rospy.loginfo(\"Success\") self._as.set_succeeded(result) elif out_of_boundaries: rospy.loginfo(\"Aborted - out of boundaries\")", "<= self.YMAX): message = \"Invalid goal position\" invalid_parameters = True rate = rospy.Rate(10.0)", "+ (goal_y-self._y)**2)) desired_angle_goal = math.atan2(goal_y-self._y, goal_x-self._x) if diff < self.TOLERANCE: message = \"Success", "elif success: rospy.loginfo(\"Success\") self._as.set_succeeded(result) elif out_of_boundaries: rospy.loginfo(\"Aborted - out of boundaries\") self._as.set_aborted(result) else:", "goal.x goal_y = goal.y success = False preempted = False invalid_parameters = False", "goal_x <= self.XMAX) or not (self.YMIN <= goal_y <= self.YMAX): message = \"Invalid", "not invalid_parameters and not out_of_boundaries: if self._as.is_preempt_requested(): if abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) < self.TOLERANCE:", "self.YMIN = 1.0 self.YMAX = 10.0 self.K_LINEAR = 1.0 self.K_ANGULAR = 3.0 self._x", "self._trajectory_length = 0.0 velocity_message = Twist() while not rospy.is_shutdown() and not 
success and", "to client\") if preempted: rospy.loginfo(\"Preempted\") self._as.set_preempted(result) elif success: rospy.loginfo(\"Success\") self._as.set_succeeded(result) elif out_of_boundaries: rospy.loginfo(\"Aborted", "<= goal_y <= self.YMAX): message = \"Invalid goal position\" invalid_parameters = True rate", "has been started with robot at ({}, {} | {})\".format(self._x, self._y, self._yaw)) def", "= None self._received_position = False self._trajectory_length = 0 self._turtlesim_pub = rospy.Publisher(\"/turtle1/cmd_vel\", Twist, queue_size=10)", "goal_y = goal.y success = False preempted = False invalid_parameters = False out_of_boundaries", "success = True if not (self.XMIN <= goal_x <= self.XMAX) or not (self.YMIN", "math.atan2(goal_y-self._y, goal_x-self._x) if diff < self.TOLERANCE: message = \"Success - reached the goal\"", "update self.send_feedback() rate.sleep() # stop velocity_message.linear.x = 0 velocity_message.angular.z = 0 self._turtlesim_pub.publish(velocity_message) #", "\"\" if abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) < self.TOLERANCE: message = \"Current position is already", "velocity_message.angular.z = angular_speed self._turtlesim_pub.publish(velocity_message) # publish after each update self.send_feedback() rate.sleep() # stop", "= Twist() while not rospy.is_shutdown() and not success and not invalid_parameters and not", "import Pose from agv_motion.msg import MoveTurtlesimAction from agv_motion.msg import MoveTurtlesimFeedback from agv_motion.msg import", "goal_y <= self.YMAX): message = \"Invalid goal position\" invalid_parameters = True rate =", "to goal linear_speed = self.K_LINEAR * diff angular_speed = self.K_ANGULAR * (desired_angle_goal-self._yaw) velocity_message.linear.x", "invalid goal parameters\") self._as.set_aborted(result) def pose_callback(self, pose_message): self._x = pose_message.x self._y = pose_message.y", "actionlib.SimpleActionServer(\"/turtlesim_action\", MoveTurtlesimAction, execute_cb=self.on_goal, auto_start=False) self._as.start() # parameters self.TOLERANCE = 0.01 self.XMIN = 1.0", "import math from geometry_msgs.msg import Twist from turtlesim.msg import Pose from agv_motion.msg import", "and stopped execution\" preempted = True break if not (self.XMIN <= self._x <=", "\"Invalid goal position\" invalid_parameters = True rate = rospy.Rate(10.0) self._trajectory_length = 0.0 velocity_message", "math from geometry_msgs.msg import Twist from turtlesim.msg import Pose from agv_motion.msg import MoveTurtlesimAction", "rospy.loginfo(\"Success\") self._as.set_succeeded(result) elif out_of_boundaries: rospy.loginfo(\"Aborted - out of boundaries\") self._as.set_aborted(result) else: rospy.loginfo(\"Aborted -", "= False self._trajectory_length = 0 self._turtlesim_pub = rospy.Publisher(\"/turtle1/cmd_vel\", Twist, queue_size=10) self._turtlesim_sub = rospy.Subscriber(\"/turtle1/pose\",", "= False message = \"\" if abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) < self.TOLERANCE: message =", "out_of_boundaries: if self._as.is_preempt_requested(): if abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) < self.TOLERANCE: message = \"Preempted but", "self.K_ANGULAR * (desired_angle_goal-self._yaw) velocity_message.linear.x = linear_speed velocity_message.angular.z = angular_speed self._turtlesim_pub.publish(velocity_message) # publish after", "angular_speed self._turtlesim_pub.publish(velocity_message) # publish after each update self.send_feedback() rate.sleep() # stop velocity_message.linear.x =", "self._y <= 
self.YMAX): message = \"Out of boundaries\" out_of_boundaries = True break diff", "self.TOLERANCE: message = \"Success - reached the goal\" success = True break else:", "goal position\" invalid_parameters = True rate = rospy.Rate(10.0) self._trajectory_length = 0.0 velocity_message =", "not out_of_boundaries: if self._as.is_preempt_requested(): if abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) < self.TOLERANCE: message = \"Preempted", "self.TOLERANCE: message = \"Current position is already at the goal\" success = True", "self._y, self._yaw)) def send_feedback(self): feedback = MoveTurtlesimFeedback() feedback.trajectory_length = self._trajectory_length self._as.publish_feedback(feedback) def on_goal(self,", "rospy.Rate(10.0) self._trajectory_length = 0.0 velocity_message = Twist() while not rospy.is_shutdown() and not success", "out of boundaries\") self._as.set_aborted(result) else: rospy.loginfo(\"Aborted - invalid goal parameters\") self._as.set_aborted(result) def pose_callback(self,", "rospy.loginfo(\"Aborted - invalid goal parameters\") self._as.set_aborted(result) def pose_callback(self, pose_message): self._x = pose_message.x self._y", "position\" invalid_parameters = True rate = rospy.Rate(10.0) self._trajectory_length = 0.0 velocity_message = Twist()", "else: self._trajectory_length += diff # execute move to goal linear_speed = self.K_LINEAR *", "(goal_y-self._y)**2)) < self.TOLERANCE: message = \"Preempted but already at goal position\" success =", "result result = MoveTurtlesimResult() result.message = message rospy.loginfo(\"Send goal result to client\") if", "MoveTurtlesimFeedback from agv_motion.msg import MoveTurtlesimResult class TurtlesimMotionServer: def __init__(self): self._as = actionlib.SimpleActionServer(\"/turtlesim_action\", MoveTurtlesimAction,", "<= self._y <= self.YMAX): message = \"Out of boundaries\" out_of_boundaries = True break", "# execute move to goal linear_speed = self.K_LINEAR * diff angular_speed = self.K_ANGULAR", "+ (goal_y-self._y)**2)) < self.TOLERANCE: message = \"Preempted but already at goal position\" success", "= 10.0 self.YMIN = 1.0 self.YMAX = 10.0 self.K_LINEAR = 1.0 self.K_ANGULAR =", "pose_message.theta self._received_position = True if __name__ == '__main__': rospy.init_node(\"turtlesim_move_server\") # , anonymous=True server", "def send_feedback(self): feedback = MoveTurtlesimFeedback() feedback.trajectory_length = self._trajectory_length self._as.publish_feedback(feedback) def on_goal(self, goal): rospy.loginfo(\"Goal", "MoveTurtlesimResult() result.message = message rospy.loginfo(\"Send goal result to client\") if preempted: rospy.loginfo(\"Preempted\") self._as.set_preempted(result)", "rospy.is_shutdown() and not success and not invalid_parameters and not out_of_boundaries: if self._as.is_preempt_requested(): if", "goal_x-self._x) if diff < self.TOLERANCE: message = \"Success - reached the goal\" success", "parameters\") self._as.set_aborted(result) def pose_callback(self, pose_message): self._x = pose_message.x self._y = pose_message.y self._yaw =", "message = \"Preempted but already at goal position\" success = True break else:", "<reponame>miroslavradojevic/agv_motion #!/usr/bin/env python import rospy import actionlib import math from geometry_msgs.msg import Twist", "actionlib import math from geometry_msgs.msg import Twist from turtlesim.msg import Pose from agv_motion.msg", "0.0 velocity_message = Twist() while not rospy.is_shutdown() and not success and not invalid_parameters", "class TurtlesimMotionServer: def 
__init__(self): self._as = actionlib.SimpleActionServer(\"/turtlesim_action\", MoveTurtlesimAction, execute_cb=self.on_goal, auto_start=False) self._as.start() # parameters", "= True break diff = abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) desired_angle_goal = math.atan2(goal_y-self._y, goal_x-self._x) if", "self.K_LINEAR * diff angular_speed = self.K_ANGULAR * (desired_angle_goal-self._yaw) velocity_message.linear.x = linear_speed velocity_message.angular.z =", "True rate = rospy.Rate(10.0) self._trajectory_length = 0.0 velocity_message = Twist() while not rospy.is_shutdown()", "= 1.0 self.XMAX = 10.0 self.YMIN = 1.0 self.YMAX = 10.0 self.K_LINEAR =", "= actionlib.SimpleActionServer(\"/turtlesim_action\", MoveTurtlesimAction, execute_cb=self.on_goal, auto_start=False) self._as.start() # parameters self.TOLERANCE = 0.01 self.XMIN =", "Pose, self.pose_callback) rospy.logwarn(\"waiting position...\") while True: if self._received_position: rospy.loginfo(\"received position\") break rospy.loginfo(\"Server has", "rospy.Subscriber(\"/turtle1/pose\", Pose, self.pose_callback) rospy.logwarn(\"waiting position...\") while True: if self._received_position: rospy.loginfo(\"received position\") break rospy.loginfo(\"Server", "execution\" preempted = True break if not (self.XMIN <= self._x <= self.XMAX) or", "MoveTurtlesimAction from agv_motion.msg import MoveTurtlesimFeedback from agv_motion.msg import MoveTurtlesimResult class TurtlesimMotionServer: def __init__(self):", "{})\".format(self._x, self._y, self._yaw)) def send_feedback(self): feedback = MoveTurtlesimFeedback() feedback.trajectory_length = self._trajectory_length self._as.publish_feedback(feedback) def", "< self.TOLERANCE: message = \"Preempted but already at goal position\" success = True", "= rospy.Rate(10.0) self._trajectory_length = 0.0 velocity_message = Twist() while not rospy.is_shutdown() and not", "queue_size=10) self._turtlesim_sub = rospy.Subscriber(\"/turtle1/pose\", Pose, self.pose_callback) rospy.logwarn(\"waiting position...\") while True: if self._received_position: rospy.loginfo(\"received", "at goal position\" success = True break else: message = \"Preempted and stopped", "= True break if not (self.XMIN <= self._x <= self.XMAX) or not (self.YMIN", "execute_cb=self.on_goal, auto_start=False) self._as.start() # parameters self.TOLERANCE = 0.01 self.XMIN = 1.0 self.XMAX =", "from agv_motion.msg import MoveTurtlesimAction from agv_motion.msg import MoveTurtlesimFeedback from agv_motion.msg import MoveTurtlesimResult class", "goal\" success = True if not (self.XMIN <= goal_x <= self.XMAX) or not", "self._trajectory_length += diff # execute move to goal linear_speed = self.K_LINEAR * diff", "Twist, queue_size=10) self._turtlesim_sub = rospy.Subscriber(\"/turtle1/pose\", Pose, self.pose_callback) rospy.logwarn(\"waiting position...\") while True: if self._received_position:", "= True if not (self.XMIN <= goal_x <= self.XMAX) or not (self.YMIN <=", "\"Preempted but already at goal position\" success = True break else: message =", "= MoveTurtlesimFeedback() feedback.trajectory_length = self._trajectory_length self._as.publish_feedback(feedback) def on_goal(self, goal): rospy.loginfo(\"Goal \" + str(goal)", "(self.YMIN <= goal_y <= self.YMAX): message = \"Invalid goal position\" invalid_parameters = True", "Twist() while not rospy.is_shutdown() and not success and not invalid_parameters and not out_of_boundaries:", "False message = \"\" if abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) < self.TOLERANCE: 
message = \"Current", "def __init__(self): self._as = actionlib.SimpleActionServer(\"/turtlesim_action\", MoveTurtlesimAction, execute_cb=self.on_goal, auto_start=False) self._as.start() # parameters self.TOLERANCE =", "rospy.loginfo(\"received position\") break rospy.loginfo(\"Server has been started with robot at ({}, {} |", "out_of_boundaries = False message = \"\" if abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) < self.TOLERANCE: message", "True break else: self._trajectory_length += diff # execute move to goal linear_speed =", "angular_speed = self.K_ANGULAR * (desired_angle_goal-self._yaw) velocity_message.linear.x = linear_speed velocity_message.angular.z = angular_speed self._turtlesim_pub.publish(velocity_message) #", "diff # execute move to goal linear_speed = self.K_LINEAR * diff angular_speed =", "1.0 self.XMAX = 10.0 self.YMIN = 1.0 self.YMAX = 10.0 self.K_LINEAR = 1.0", "import rospy import actionlib import math from geometry_msgs.msg import Twist from turtlesim.msg import", "stopped execution\" preempted = True break if not (self.XMIN <= self._x <= self.XMAX)", "goal_x = goal.x goal_y = goal.y success = False preempted = False invalid_parameters", "rospy.loginfo(\"Goal \" + str(goal) + \" received\") goal_x = goal.x goal_y = goal.y", "success and not invalid_parameters and not out_of_boundaries: if self._as.is_preempt_requested(): if abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2))", "= \"Preempted but already at goal position\" success = True break else: message", "= \"Out of boundaries\" out_of_boundaries = True break diff = abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2))", "diff < self.TOLERANCE: message = \"Success - reached the goal\" success = True", "break rospy.loginfo(\"Server has been started with robot at ({}, {} | {})\".format(self._x, self._y,", "preempted = True break if not (self.XMIN <= self._x <= self.XMAX) or not", "move to goal linear_speed = self.K_LINEAR * diff angular_speed = self.K_ANGULAR * (desired_angle_goal-self._yaw)", "self._x = pose_message.x self._y = pose_message.y self._yaw = pose_message.theta self._received_position = True if", "#!/usr/bin/env python import rospy import actionlib import math from geometry_msgs.msg import Twist from", "import Twist from turtlesim.msg import Pose from agv_motion.msg import MoveTurtlesimAction from agv_motion.msg import", "= \"Current position is already at the goal\" success = True if not", "+= diff # execute move to goal linear_speed = self.K_LINEAR * diff angular_speed", "out_of_boundaries: rospy.loginfo(\"Aborted - out of boundaries\") self._as.set_aborted(result) else: rospy.loginfo(\"Aborted - invalid goal parameters\")", "= False preempted = False invalid_parameters = False out_of_boundaries = False message =", "result.message = message rospy.loginfo(\"Send goal result to client\") if preempted: rospy.loginfo(\"Preempted\") self._as.set_preempted(result) elif", "10.0 self.YMIN = 1.0 self.YMAX = 10.0 self.K_LINEAR = 1.0 self.K_ANGULAR = 3.0", "message = \"Current position is already at the goal\" success = True if", "position is already at the goal\" success = True if not (self.XMIN <=", "diff angular_speed = self.K_ANGULAR * (desired_angle_goal-self._yaw) velocity_message.linear.x = linear_speed velocity_message.angular.z = angular_speed self._turtlesim_pub.publish(velocity_message)", "break else: message = \"Preempted and stopped execution\" preempted = True break if", "\"Current position is already at the goal\" success = True if not (self.XMIN", "< self.TOLERANCE: 
message = \"Current position is already at the goal\" success =", "is already at the goal\" success = True if not (self.XMIN <= goal_x", "MoveTurtlesimFeedback() feedback.trajectory_length = self._trajectory_length self._as.publish_feedback(feedback) def on_goal(self, goal): rospy.loginfo(\"Goal \" + str(goal) +", "self._turtlesim_pub.publish(velocity_message) # publish after each update self.send_feedback() rate.sleep() # stop velocity_message.linear.x = 0", "geometry_msgs.msg import Twist from turtlesim.msg import Pose from agv_motion.msg import MoveTurtlesimAction from agv_motion.msg", "0 velocity_message.angular.z = 0 self._turtlesim_pub.publish(velocity_message) # send result result = MoveTurtlesimResult() result.message =", "velocity_message.linear.x = 0 velocity_message.angular.z = 0 self._turtlesim_pub.publish(velocity_message) # send result result = MoveTurtlesimResult()", "+ str(goal) + \" received\") goal_x = goal.x goal_y = goal.y success =", "Pose from agv_motion.msg import MoveTurtlesimAction from agv_motion.msg import MoveTurtlesimFeedback from agv_motion.msg import MoveTurtlesimResult", "already at the goal\" success = True if not (self.XMIN <= goal_x <=", "if abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) < self.TOLERANCE: message = \"Current position is already at", "stop velocity_message.linear.x = 0 velocity_message.angular.z = 0 self._turtlesim_pub.publish(velocity_message) # send result result =", "* (desired_angle_goal-self._yaw) velocity_message.linear.x = linear_speed velocity_message.angular.z = angular_speed self._turtlesim_pub.publish(velocity_message) # publish after each", "False self._trajectory_length = 0 self._turtlesim_pub = rospy.Publisher(\"/turtle1/cmd_vel\", Twist, queue_size=10) self._turtlesim_sub = rospy.Subscriber(\"/turtle1/pose\", Pose,", "= \"\" if abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) < self.TOLERANCE: message = \"Current position is", "diff = abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) desired_angle_goal = math.atan2(goal_y-self._y, goal_x-self._x) if diff < self.TOLERANCE:", "\"Out of boundaries\" out_of_boundaries = True break diff = abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) desired_angle_goal", "if abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) < self.TOLERANCE: message = \"Preempted but already at goal", "def on_goal(self, goal): rospy.loginfo(\"Goal \" + str(goal) + \" received\") goal_x = goal.x", "abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) desired_angle_goal = math.atan2(goal_y-self._y, goal_x-self._x) if diff < self.TOLERANCE: message =", "invalid_parameters and not out_of_boundaries: if self._as.is_preempt_requested(): if abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) < self.TOLERANCE: message", "self._as.set_aborted(result) else: rospy.loginfo(\"Aborted - invalid goal parameters\") self._as.set_aborted(result) def pose_callback(self, pose_message): self._x =", "= self.K_ANGULAR * (desired_angle_goal-self._yaw) velocity_message.linear.x = linear_speed velocity_message.angular.z = angular_speed self._turtlesim_pub.publish(velocity_message) # publish", "self._as = actionlib.SimpleActionServer(\"/turtlesim_action\", MoveTurtlesimAction, execute_cb=self.on_goal, auto_start=False) self._as.start() # parameters self.TOLERANCE = 0.01 self.XMIN", "boundaries\" out_of_boundaries = True break diff = abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) desired_angle_goal = math.atan2(goal_y-self._y,", "result = MoveTurtlesimResult() 
result.message = message rospy.loginfo(\"Send goal result to client\") if preempted:", "been started with robot at ({}, {} | {})\".format(self._x, self._y, self._yaw)) def send_feedback(self):", "self.TOLERANCE: message = \"Preempted but already at goal position\" success = True break", "= pose_message.y self._yaw = pose_message.theta self._received_position = True if __name__ == '__main__': rospy.init_node(\"turtlesim_move_server\")", "rospy.Publisher(\"/turtle1/cmd_vel\", Twist, queue_size=10) self._turtlesim_sub = rospy.Subscriber(\"/turtle1/pose\", Pose, self.pose_callback) rospy.logwarn(\"waiting position...\") while True: if", "0.01 self.XMIN = 1.0 self.XMAX = 10.0 self.YMIN = 1.0 self.YMAX = 10.0", "rate = rospy.Rate(10.0) self._trajectory_length = 0.0 velocity_message = Twist() while not rospy.is_shutdown() and", "self.XMAX) or not (self.YMIN <= goal_y <= self.YMAX): message = \"Invalid goal position\"", "True break diff = abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) desired_angle_goal = math.atan2(goal_y-self._y, goal_x-self._x) if diff", "self.send_feedback() rate.sleep() # stop velocity_message.linear.x = 0 velocity_message.angular.z = 0 self._turtlesim_pub.publish(velocity_message) # send", "self._received_position = False self._trajectory_length = 0 self._turtlesim_pub = rospy.Publisher(\"/turtle1/cmd_vel\", Twist, queue_size=10) self._turtlesim_sub =", "elif out_of_boundaries: rospy.loginfo(\"Aborted - out of boundaries\") self._as.set_aborted(result) else: rospy.loginfo(\"Aborted - invalid goal", "publish after each update self.send_feedback() rate.sleep() # stop velocity_message.linear.x = 0 velocity_message.angular.z =", "pose_message.x self._y = pose_message.y self._yaw = pose_message.theta self._received_position = True if __name__ ==", "= math.atan2(goal_y-self._y, goal_x-self._x) if diff < self.TOLERANCE: message = \"Success - reached the", "success: rospy.loginfo(\"Success\") self._as.set_succeeded(result) elif out_of_boundaries: rospy.loginfo(\"Aborted - out of boundaries\") self._as.set_aborted(result) else: rospy.loginfo(\"Aborted", "self._as.publish_feedback(feedback) def on_goal(self, goal): rospy.loginfo(\"Goal \" + str(goal) + \" received\") goal_x =", "self._yaw)) def send_feedback(self): feedback = MoveTurtlesimFeedback() feedback.trajectory_length = self._trajectory_length self._as.publish_feedback(feedback) def on_goal(self, goal):", "self.TOLERANCE = 0.01 self.XMIN = 1.0 self.XMAX = 10.0 self.YMIN = 1.0 self.YMAX", "= 1.0 self.K_ANGULAR = 3.0 self._x = None self._y = None self._yaw =", "on_goal(self, goal): rospy.loginfo(\"Goal \" + str(goal) + \" received\") goal_x = goal.x goal_y", "= rospy.Publisher(\"/turtle1/cmd_vel\", Twist, queue_size=10) self._turtlesim_sub = rospy.Subscriber(\"/turtle1/pose\", Pose, self.pose_callback) rospy.logwarn(\"waiting position...\") while True:", "position\" success = True break else: message = \"Preempted and stopped execution\" preempted", "not (self.XMIN <= goal_x <= self.XMAX) or not (self.YMIN <= goal_y <= self.YMAX):", "goal result to client\") if preempted: rospy.loginfo(\"Preempted\") self._as.set_preempted(result) elif success: rospy.loginfo(\"Success\") self._as.set_succeeded(result) elif", "python import rospy import actionlib import math from geometry_msgs.msg import Twist from turtlesim.msg", "position\") break rospy.loginfo(\"Server has been started with robot at ({}, {} | {})\".format(self._x,", "= \"Preempted and stopped execution\" preempted = True break if not (self.XMIN <=", "or not 
(self.YMIN <= self._y <= self.YMAX): message = \"Out of boundaries\" out_of_boundaries", "= abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) desired_angle_goal = math.atan2(goal_y-self._y, goal_x-self._x) if diff < self.TOLERANCE: message", "pose_message.y self._yaw = pose_message.theta self._received_position = True if __name__ == '__main__': rospy.init_node(\"turtlesim_move_server\") #", "= message rospy.loginfo(\"Send goal result to client\") if preempted: rospy.loginfo(\"Preempted\") self._as.set_preempted(result) elif success:", "self._as.set_preempted(result) elif success: rospy.loginfo(\"Success\") self._as.set_succeeded(result) elif out_of_boundaries: rospy.loginfo(\"Aborted - out of boundaries\") self._as.set_aborted(result)", "def pose_callback(self, pose_message): self._x = pose_message.x self._y = pose_message.y self._yaw = pose_message.theta self._received_position", "MoveTurtlesimResult class TurtlesimMotionServer: def __init__(self): self._as = actionlib.SimpleActionServer(\"/turtlesim_action\", MoveTurtlesimAction, execute_cb=self.on_goal, auto_start=False) self._as.start() #", "break diff = abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) desired_angle_goal = math.atan2(goal_y-self._y, goal_x-self._x) if diff <", "success = True break else: message = \"Preempted and stopped execution\" preempted =", "{} | {})\".format(self._x, self._y, self._yaw)) def send_feedback(self): feedback = MoveTurtlesimFeedback() feedback.trajectory_length = self._trajectory_length", "break if not (self.XMIN <= self._x <= self.XMAX) or not (self.YMIN <= self._y", "self.pose_callback) rospy.logwarn(\"waiting position...\") while True: if self._received_position: rospy.loginfo(\"received position\") break rospy.loginfo(\"Server has been", "self.XMAX) or not (self.YMIN <= self._y <= self.YMAX): message = \"Out of boundaries\"", "velocity_message.linear.x = linear_speed velocity_message.angular.z = angular_speed self._turtlesim_pub.publish(velocity_message) # publish after each update self.send_feedback()", "= angular_speed self._turtlesim_pub.publish(velocity_message) # publish after each update self.send_feedback() rate.sleep() # stop velocity_message.linear.x", "1.0 self.YMAX = 10.0 self.K_LINEAR = 1.0 self.K_ANGULAR = 3.0 self._x = None", "<= self._x <= self.XMAX) or not (self.YMIN <= self._y <= self.YMAX): message =", "goal\" success = True break else: self._trajectory_length += diff # execute move to", "else: rospy.loginfo(\"Aborted - invalid goal parameters\") self._as.set_aborted(result) def pose_callback(self, pose_message): self._x = pose_message.x", "= goal.y success = False preempted = False invalid_parameters = False out_of_boundaries =", "message = \"Out of boundaries\" out_of_boundaries = True break diff = abs(math.sqrt((goal_x-self._x)**2 +", "= 0 self._turtlesim_pub.publish(velocity_message) # send result result = MoveTurtlesimResult() result.message = message rospy.loginfo(\"Send", "Twist from turtlesim.msg import Pose from agv_motion.msg import MoveTurtlesimAction from agv_motion.msg import MoveTurtlesimFeedback", "rospy.loginfo(\"Aborted - out of boundaries\") self._as.set_aborted(result) else: rospy.loginfo(\"Aborted - invalid goal parameters\") self._as.set_aborted(result)", "<= self.XMAX) or not (self.YMIN <= self._y <= self.YMAX): message = \"Out of", "self._trajectory_length = 0 self._turtlesim_pub = rospy.Publisher(\"/turtle1/cmd_vel\", Twist, queue_size=10) self._turtlesim_sub = rospy.Subscriber(\"/turtle1/pose\", Pose, self.pose_callback)", "result to 
client\") if preempted: rospy.loginfo(\"Preempted\") self._as.set_preempted(result) elif success: rospy.loginfo(\"Success\") self._as.set_succeeded(result) elif out_of_boundaries:", "False preempted = False invalid_parameters = False out_of_boundaries = False message = \"\"", "0 self._turtlesim_pub = rospy.Publisher(\"/turtle1/cmd_vel\", Twist, queue_size=10) self._turtlesim_sub = rospy.Subscriber(\"/turtle1/pose\", Pose, self.pose_callback) rospy.logwarn(\"waiting position...\")", "(goal_y-self._y)**2)) < self.TOLERANCE: message = \"Current position is already at the goal\" success", "linear_speed velocity_message.angular.z = angular_speed self._turtlesim_pub.publish(velocity_message) # publish after each update self.send_feedback() rate.sleep() #", "= 0 self._turtlesim_pub = rospy.Publisher(\"/turtle1/cmd_vel\", Twist, queue_size=10) self._turtlesim_sub = rospy.Subscriber(\"/turtle1/pose\", Pose, self.pose_callback) rospy.logwarn(\"waiting", "preempted = False invalid_parameters = False out_of_boundaries = False message = \"\" if", "message = \"Preempted and stopped execution\" preempted = True break if not (self.XMIN", "self.YMAX): message = \"Out of boundaries\" out_of_boundaries = True break diff = abs(math.sqrt((goal_x-self._x)**2", "boundaries\") self._as.set_aborted(result) else: rospy.loginfo(\"Aborted - invalid goal parameters\") self._as.set_aborted(result) def pose_callback(self, pose_message): self._x", "each update self.send_feedback() rate.sleep() # stop velocity_message.linear.x = 0 velocity_message.angular.z = 0 self._turtlesim_pub.publish(velocity_message)", "self._yaw = pose_message.theta self._received_position = True if __name__ == '__main__': rospy.init_node(\"turtlesim_move_server\") # ,", "abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) < self.TOLERANCE: message = \"Preempted but already at goal position\"", "not (self.YMIN <= goal_y <= self.YMAX): message = \"Invalid goal position\" invalid_parameters =", "import MoveTurtlesimAction from agv_motion.msg import MoveTurtlesimFeedback from agv_motion.msg import MoveTurtlesimResult class TurtlesimMotionServer: def", "= 0.0 velocity_message = Twist() while not rospy.is_shutdown() and not success and not", "break else: self._trajectory_length += diff # execute move to goal linear_speed = self.K_LINEAR", "client\") if preempted: rospy.loginfo(\"Preempted\") self._as.set_preempted(result) elif success: rospy.loginfo(\"Success\") self._as.set_succeeded(result) elif out_of_boundaries: rospy.loginfo(\"Aborted -", "= True break else: message = \"Preempted and stopped execution\" preempted = True", "send result result = MoveTurtlesimResult() result.message = message rospy.loginfo(\"Send goal result to client\")", "= \"Success - reached the goal\" success = True break else: self._trajectory_length +=", "if not (self.XMIN <= goal_x <= self.XMAX) or not (self.YMIN <= goal_y <=", "message = \"Success - reached the goal\" success = True break else: self._trajectory_length", "self._received_position: rospy.loginfo(\"received position\") break rospy.loginfo(\"Server has been started with robot at ({}, {}", "# stop velocity_message.linear.x = 0 velocity_message.angular.z = 0 self._turtlesim_pub.publish(velocity_message) # send result result", "goal.y success = False preempted = False invalid_parameters = False out_of_boundaries = False", "from agv_motion.msg import MoveTurtlesimFeedback from agv_motion.msg import MoveTurtlesimResult class TurtlesimMotionServer: def __init__(self): self._as", "- out of boundaries\") 
self._as.set_aborted(result) else: rospy.loginfo(\"Aborted - invalid goal parameters\") self._as.set_aborted(result) def", "velocity_message.angular.z = 0 self._turtlesim_pub.publish(velocity_message) # send result result = MoveTurtlesimResult() result.message = message", "rospy import actionlib import math from geometry_msgs.msg import Twist from turtlesim.msg import Pose", "= self._trajectory_length self._as.publish_feedback(feedback) def on_goal(self, goal): rospy.loginfo(\"Goal \" + str(goal) + \" received\")", "from turtlesim.msg import Pose from agv_motion.msg import MoveTurtlesimAction from agv_motion.msg import MoveTurtlesimFeedback from", "self._as.set_succeeded(result) elif out_of_boundaries: rospy.loginfo(\"Aborted - out of boundaries\") self._as.set_aborted(result) else: rospy.loginfo(\"Aborted - invalid", "(self.YMIN <= self._y <= self.YMAX): message = \"Out of boundaries\" out_of_boundaries = True", "10.0 self.K_LINEAR = 1.0 self.K_ANGULAR = 3.0 self._x = None self._y = None", "not rospy.is_shutdown() and not success and not invalid_parameters and not out_of_boundaries: if self._as.is_preempt_requested():", "self._y = None self._yaw = None self._received_position = False self._trajectory_length = 0 self._turtlesim_pub", "* diff angular_speed = self.K_ANGULAR * (desired_angle_goal-self._yaw) velocity_message.linear.x = linear_speed velocity_message.angular.z = angular_speed", "| {})\".format(self._x, self._y, self._yaw)) def send_feedback(self): feedback = MoveTurtlesimFeedback() feedback.trajectory_length = self._trajectory_length self._as.publish_feedback(feedback)", "after each update self.send_feedback() rate.sleep() # stop velocity_message.linear.x = 0 velocity_message.angular.z = 0", "self._turtlesim_pub = rospy.Publisher(\"/turtle1/cmd_vel\", Twist, queue_size=10) self._turtlesim_sub = rospy.Subscriber(\"/turtle1/pose\", Pose, self.pose_callback) rospy.logwarn(\"waiting position...\") while", "self._x = None self._y = None self._yaw = None self._received_position = False self._trajectory_length", "of boundaries\") self._as.set_aborted(result) else: rospy.loginfo(\"Aborted - invalid goal parameters\") self._as.set_aborted(result) def pose_callback(self, pose_message):", "velocity_message = Twist() while not rospy.is_shutdown() and not success and not invalid_parameters and", "reached the goal\" success = True break else: self._trajectory_length += diff # execute", "None self._received_position = False self._trajectory_length = 0 self._turtlesim_pub = rospy.Publisher(\"/turtle1/cmd_vel\", Twist, queue_size=10) self._turtlesim_sub", "not (self.XMIN <= self._x <= self.XMAX) or not (self.YMIN <= self._y <= self.YMAX):", "message = \"Invalid goal position\" invalid_parameters = True rate = rospy.Rate(10.0) self._trajectory_length =", "({}, {} | {})\".format(self._x, self._y, self._yaw)) def send_feedback(self): feedback = MoveTurtlesimFeedback() feedback.trajectory_length =", "self.YMAX): message = \"Invalid goal position\" invalid_parameters = True rate = rospy.Rate(10.0) self._trajectory_length", "self.K_LINEAR = 1.0 self.K_ANGULAR = 3.0 self._x = None self._y = None self._yaw", "out_of_boundaries = True break diff = abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) desired_angle_goal = math.atan2(goal_y-self._y, goal_x-self._x)", "the goal\" success = True if not (self.XMIN <= goal_x <= self.XMAX) or", "True if __name__ == '__main__': rospy.init_node(\"turtlesim_move_server\") # , anonymous=True server = TurtlesimMotionServer() rospy.spin()", "self.K_ANGULAR = 3.0 
# agv_motion: turtlesim motion action server
import rospy
import actionlib
import math

from geometry_msgs.msg import Twist
from turtlesim.msg import Pose
from agv_motion.msg import MoveTurtlesimAction
from agv_motion.msg import MoveTurtlesimFeedback
from agv_motion.msg import MoveTurtlesimResult


class TurtlesimMotionServer:
    def __init__(self):
        self._as = actionlib.SimpleActionServer("/turtlesim_action", MoveTurtlesimAction,
                                                execute_cb=self.on_goal, auto_start=False)
        self._as.start()

        # parameters
        self.TOLERANCE = 0.01
        self.XMIN = 1.0
        self.XMAX = 10.0
        self.YMIN = 1.0
        self.YMAX = 10.0
        self.K_LINEAR = 1.0
        self.K_ANGULAR = 3.0

        self._x = None
        self._y = None
        self._yaw = None
        self._received_position = False
        self._trajectory_length = 0
        self._turtlesim_pub = ...  # publisher (and pose subscriber) setup truncated in the source

        rospy.logwarn("waiting position...")
        while True:
            if self._received_position:
                rospy.loginfo("received position")
                break
        rospy.loginfo("Server has been started with robot at ({}, {} | {})".format(
            self._x, self._y, self._yaw))

    def send_feedback(self):
        feedback = MoveTurtlesimFeedback()
        feedback.trajectory_length = self._trajectory_length
        self._as.publish_feedback(feedback)

    def on_goal(self, goal):
        rospy.loginfo("Goal " + str(goal) + " received")
        goal_x = goal.x
        goal_y = goal.y

        success = False
        preempted = False
        invalid_parameters = False
        out_of_boundaries = False
        message = ""

        if abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) < self.TOLERANCE:
            message = "Current position is already at the goal"
            success = True
        if not (self.XMIN <= goal_x <= self.XMAX) or not (self.YMIN <= goal_y <= self.YMAX):
            message = "Invalid goal position"
            invalid_parameters = True

        rate = rospy.Rate(10.0)
        self._trajectory_length = 0.0
        velocity_message = Twist()

        while not rospy.is_shutdown() and not success and not invalid_parameters and not out_of_boundaries:
            if self._as.is_preempt_requested():
                if abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2)) < self.TOLERANCE:
                    message = "Preempted but already at goal position"
                    success = True
                    break
                else:
                    message = "Preempted and stopped execution"
                    preempted = True
                    break

            if not (self.XMIN <= self._x <= self.XMAX) or not (self.YMIN <= self._y <= self.YMAX):
                message = "Out of boundaries"
                out_of_boundaries = True
                break

            diff = abs(math.sqrt((goal_x-self._x)**2 + (goal_y-self._y)**2))
            desired_angle_goal = math.atan2(goal_y-self._y, goal_x-self._x)

            if diff < self.TOLERANCE:
                message = "Success - reached the goal"
                success = True
                break
            else:
                self._trajectory_length += diff

            # execute move to goal
            linear_speed = self.K_LINEAR * diff
            angular_speed = self.K_ANGULAR * (desired_angle_goal-self._yaw)
            velocity_message.linear.x = linear_speed
            velocity_message.angular.z = angular_speed
            self._turtlesim_pub.publish(velocity_message)

            # publish after each update
            self.send_feedback()
            rate.sleep()

        # stop
        velocity_message.linear.x = 0
        velocity_message.angular.z = 0
        self._turtlesim_pub.publish(velocity_message)

        # send result
        result = MoveTurtlesimResult()
        result.message = message
        rospy.loginfo("Send goal result to client")
        if preempted:
            rospy.loginfo("Preempted")
            self._as.set_preempted(result)
        elif success:
            rospy.loginfo("Success")
            self._as.set_succeeded(result)
        elif out_of_boundaries:
            rospy.loginfo("Aborted - out of boundaries")
            self._as.set_aborted(result)

    def pose_callback(self, pose_message):
        self._x = pose_message.x
        self._y = pose_message.y
        self._yaw = pose_message.theta
        self._received_position = True


if __name__ == '__main__':
    rospy.init_node("turtlesim_move_server")  # , anonymous=True
    server = TurtlesimMotionServer()
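A minimal client sketch for the action server above. It assumes the generated agv_motion goal type is named MoveTurtlesimGoal and exposes the x/y fields the server reads; the node name and goal coordinates are placeholders.

import rospy
import actionlib
from agv_motion.msg import MoveTurtlesimAction, MoveTurtlesimGoal


def feedback_cb(feedback):
    # the server publishes the accumulated trajectory length as feedback
    rospy.loginfo("trajectory so far: %s", feedback.trajectory_length)


if __name__ == '__main__':
    rospy.init_node("turtlesim_move_client")
    client = actionlib.SimpleActionClient("/turtlesim_action", MoveTurtlesimAction)
    client.wait_for_server()
    goal = MoveTurtlesimGoal(x=5.0, y=5.0)  # placeholder goal inside the XMIN..XMAX / YMIN..YMAX box
    client.send_goal(goal, feedback_cb=feedback_cb)
    client.wait_for_result()
    rospy.loginfo(client.get_result().message)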
'Format and display the output text.'

__author__ = '<NAME>'
__copyright__ = 'Copyright 2011 <NAME>'
__license__ = 'ISC'
__version__ = '0.5.0.0'
__status__ = 'Development'

import os
import re
import struct


def ioctl_term_size(filed):
    'Attempt to find terminal dimensions using an IO Control system call.'
    try:
        import fcntl, termios
        packed = fcntl.ioctl(filed, termios.TIOCGWINSZ, '1234')
        rows_cols = struct.unpack('hh', packed)
    except ImportError:
        return None
    if rows_cols == (0, 0):
        return None
    return rows_cols


def terminal_size():
    """Determine the terminal size or set a default size if that fails.

    From <NAME>'s code, http://pdos.csail.mit.edu/~cblake/cls/cls.py
    Modifications by <NAME> to allow Curveship to run in GNU Emacs."""
    rows_cols = ioctl_term_size(0) or ioctl_term_size(1) or ioctl_term_size(2)
    if not rows_cols:
        try:
            filed = os.open(os.ctermid(), os.O_RDONLY)
            rows_cols = ioctl_term_size(filed)
            os.close(filed)
        except AttributeError:
            pass
    if not rows_cols:
        # Some shells may set these environment variables.
        rows_cols = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
    return int(rows_cols[1]), int(rows_cols[0])  # Reverses it to cols, rows.


def _break_words(string, char_limit):
    'Lineate the string based on the passed-in character limit.'
    if len(string) <= char_limit:
        next_line = string
        string = ''
    elif '\n' in string[0:char_limit]:
        first_newline = string.index('\n')
        next_line = string[0:first_newline]
        string = string[(first_newline + 1):]
    elif ' ' not in string[0:char_limit]:
        next_line = string[0:char_limit]
        string = string[char_limit:]
    else:
        last_space = string[0:char_limit].rindex(' ')
        next_line = string[0:last_space]
        string = string[(last_space + 1):]
    return (next_line, string)


def present(string, out_streams, pre='', post='\n\n'):
    'Print the string, broken into lines, to the output streams.'
    if len(string) == 0:
        return
    if string[-1:] == '\n':
        post = re.sub('^[ \t]+', '', post)
    string = pre + string + post
    (cols, _) = terminal_size()
    while len(string) > 0:
        (next_line, string) = _break_words(string, cols)
        out_streams.write(next_line)
        if len(string) > 0:
            out_streams.write('\n')
    out_streams.write(string)


def center(string, out_streams, pre='', post='\n'):
    'Center the output and print it to the output streams.'
    string = pre + string + post
    while len(string) > 0:
        (cols, _) = terminal_size()
        (next_line, string) = _break_words(string, cols)
        while len(next_line) > 0 and next_line[0] == '\n':
            out_streams.write('\n')
            next_line = next_line[1:]
        spaces = ''
        i = 1
        while i <= (cols - len(next_line))/2:
            spaces += ' '
            i += 1
        out_streams.write(' ' + spaces + next_line)
        if len(string) > 0:
            out_streams.write('\n')
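A small usage sketch for the presentation helpers above, assuming they are defined in scope. sys.stdout stands in for the out_streams argument, which only needs a write() method here; the sample strings are placeholders.

import sys

center('CURVESHIP', sys.stdout)
present('This paragraph is wrapped to the width reported by terminal_size(), '
        'breaking at spaces or at explicit newlines.', sys.stdout)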
# app/routes/build.py
"""Routes at ``/build`` that implement dashboard builds."""

from flask import jsonify, request, current_app

from . import api
from .logging import log_route
from ..worker import build_dashboard_for_product


@api.route('/build', methods=['POST'])
@log_route
def build_dashboards():
    """Build dashboard(s).

    :statuscode 202: Dashboard rebuild trigger sent.
    """
    for product_resource_url in request.json['product_urls']:
        build_dashboard_for_product(product_resource_url, current_app.config)
    # Ideally we'd provide a status endpoint, and put that URL in the header
    return jsonify({}), 202, {}
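A hypothetical request against this endpoint. It assumes the api blueprint is mounted at the application root and that the app is running locally; the host, port, and product URL are placeholders.

import requests

resp = requests.post(
    'http://localhost:5000/build',
    json={'product_urls': ['http://localhost:5000/products/example-product']},
)
assert resp.status_code == 202  # the rebuild trigger was accepted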
# Decision-tree (ID3) helpers
from math import log
import operator


def createDataSet():  # returns a small toy dataset
    dataSet = [[1, 1, 'yes'],  # leading rows partially truncated in the source
               [1, 0, 'no'],
               [0, 1, 'no'],
               [0, 1, 'no']]
    labels = ['no surfacing', 'flippers']
    return dataSet, labels


def calcShannonEnt(dataSet):  # computes the Shannon entropy of the dataset
    numEntries = len(dataSet)  # number of rows in the dataset
    labelCounts = {}  # dictionary holding the count of each class
    for featVec in dataSet:  # walk every example with its features
        currentLabel = featVec[-1]  # the class label is the last element of the row
        if currentLabel not in labelCounts.keys():
            labelCounts[currentLabel] = 0  # if the class is not present yet, add the key with count 0
        labelCounts[currentLabel] += 1  # increment the class count
    shannonEnt = 0.0  # initialise the entropy to zero
    for key in labelCounts:  # compute each label's probability and add its contribution
        prob = float(labelCounts[key]) / numEntries  # probability of the class
        shannonEnt -= prob * log(prob, 2)  # entropy term, base-2 logarithm
    return shannonEnt  # return the entropy


def splitDataSet(dataSet, axis, value):  # rows whose column `axis` equals `value`, with that column removed
    retDataSet = []  # list where the split will be collected
    for featVec in dataSet:  # walk every row of the dataset
        if featVec[axis] == value:  # keep the row only if it satisfies the given condition
            reducedFeatVec = featVec[:axis]  # build a row with the remaining columns, minus the axis column
            reducedFeatVec.extend(featVec[axis + 1:])
            retDataSet.append(reducedFeatVec)  # append the row to the returned matrix
    return retDataSet  # return all the rows matching the given column value


def chooseBestFeatureToSplit(dataSet):  # this is the core function of the algorithm
    numFeatures = len(dataSet[0]) - 1  # number of feature columns; the last column holds the class label
    baseEntropy = calcShannonEnt(dataSet)  # entropy of the full dataset
    bestInfoGain = 0.0; bestFeature = -1  # initialise the best gain and the best feature
    for i in range(numFeatures):  # walk every feature
        featList = [example[i] for example in dataSet]  # value of feature "i" for every row
        uniqueVals = set(featList)  # set of the unique values of this feature
        newEntropy = 0.0  # initialise the entropy for this feature
        for value in uniqueVals:  # walk every unique value of feature "i"
            subDataSet = splitDataSet(dataSet, i, value)  # subset of the data with feature i == value
            prob = len(subDataSet) / float(len(dataSet))  # weight of this subset
            newEntropy += prob * calcShannonEnt(subDataSet)  # add the weighted entropy of the subset
        infoGain = baseEntropy - newEntropy  # compute the information gain
        if infoGain > bestInfoGain:  # if it beats the current best, take it as the new best
            bestInfoGain = infoGain
            bestFeature = i  # also keep i, i.e. the index of the best feature
    return bestFeature  # finally, return the index of the best feature


def majorityCnt(classList):  # returns the majority class
    classCount = {}  # dictionary where the count of each class is stored
    for vote in classList:  # walk the class list
        if vote not in classCount.keys():
            classCount[vote] = 0  # if the key does not exist, add it as zero
        classCount[vote] += 1  # increment the key's value
    sortedClassCount = sorted(classCount.iteritems(), key=operator.itemgetter(1), reverse=True)  # sort from highest to lowest
    return sortedClassCount[0][0]  # return the first element, i.e. the majority class


def createTree(dataSet, labels):  # builds the tree recursively
    classList = [example[-1] for example in dataSet]  # the class list is the last column of the dataset
    if classList.count(classList[0]) == len(classList):  # every entry belongs to the same class
        return classList[0]  # recursion stops here and the class is returned
    if len(dataSet[0]) == 1:  # likewise, if there are no features left to split on
        return majorityCnt(classList)  # return the majority class
    bestFeat = chooseBestFeatureToSplit(dataSet)  # best feature to split on
    bestFeatLabel = labels[bestFeat]  # label of the best feature
    myTree = {bestFeatLabel: {}}  # dictionary of dictionaries keyed by the best feature's label
    del(labels[bestFeat])  # remove the best feature's label from the label vector; in short, the labels are split
    featValues = [example[bestFeat] for example in dataSet]  # values of the best feature, used for the next splits
    uniqueVals = set(featValues)  # unique values of this feature
    for value in uniqueVals:  # walk the unique values of the best feature
        subLabels = labels[:]  # copy the labels; passing labels itself would make every subtree touch the same variable
        myTree[bestFeatLabel][value] = createTree(splitDataSet(dataSet, bestFeat, value), subLabels)  # attach the subtree for this value
    return myTree  # return the tree


def classify(inputTree, featLabels, testVec):  # walks a tree to classify a feature vector; also recursive
    firstStr = list(inputTree)[0]  # the tree is a dict of dicts, so this is the first key, i.e. the label at this level
    secondDict = inputTree[firstStr]  # value under the first key, which is the set of subtrees
    featIndex = featLabels.index(firstStr)  # index, in the feature vector, of the feature at this level of the tree
    key = testVec[featIndex]  # value of that feature in the input vector
    valueOfFeat = secondDict[key]  # descend into the tree, picking the branch for that feature value
    if isinstance(valueOfFeat, dict):  # check whether a subtree remains below
        classLabel = classify(valueOfFeat, featLabels, testVec)  # if so, recurse with the subtree
    else:
        classLabel = valueOfFeat  # otherwise the class label is that value
    return classLabel  # return the class label


def storeTree(inputTree, filename):  # saves a tree to a file
    import pickle
    fw = open(filename, 'wb')
    pickle.dump(inputTree, fw)
    fw.close()


def grabTree(filename):  # loads a tree from a file
    ...
"caracteristica return bestFeature #Al final, retorna el índice de la mejor caracteristica. def", "value): #Funcion que retorna todas las filas del dataset cuya columna (axis) sea", "con la caracteristica i prob = len(subDataSet)/float(len(dataSet)) #Obtiene la probabilidad de este subconjunto", "#Ordena de mayor a menor return sortedClassCount[0][0] #retorna el primer elemento, es decir,", "= [example[bestFeat] for example in dataSet] #Obtiene los valores de la mejor caracteristica", "la fila a la matriz de retorno return retDataSet #retorna todos aquellos datos", "etiqueta de clase corresponde al valor de la caracteristica return classLabel #retorna la", "exista la key, la añade como cero classCount[vote] += 1 #suma uno al", "del arbol de manera recursiva classList = [example[-1] for example in dataSet] #La", "elementos únicos con sus caracteristicas currentLabel = featVec[-1] #Establece currentLable al valor del", "cuenta de cada clase for vote in classList: #For que recorre la lista", "mejor etiqueta el siguiente subarbol return myTree #retorna el arbol def classify(inputTree,featLabels,testVec): #Funcion", "valor de la primera llave, que viene a ser un conjunto de arboles", "for value in uniqueVals: #For que recorre todos los valores únicos de la", "dataset, deja de dividirse return majorityCnt(classList) #Retorna el valor de la clase mayoritaria", "vote not in classCount.keys(): classCount[vote] = 0 #En caso de que no exista", "entropia dada para este subconjunto infoGain = baseEntropy - newEntropy #Calcula la ganancia", "mejor caracteristica. def majorityCnt(classList): #Funcion que retorna el valor de la clase mayoritaria", "la ultima columna del dataset, por eso la asigna a classList if classList.count(classList[0])", "antes del axis reducedFeatVec.extend(featVec[axis+1:]) #añade los elementos restantes menos la columna del axis", "vector de caracteristicas, obtiene aquella que corresponde con la del nivel actual del", "float(labelCounts[key])/numEntries #aqui calcula la probabilidad shannonEnt -= prob * log(prob,2) #aqui calcula la", "diccionarios cuya llave es la de la etiqueta con la mejor caracteristica del(labels[bestFeat])", "un diccionario de diccionarios cuya llave es la de la etiqueta con la", "numero de caracteristicas (columnas). 
Es importante recalcar que la ultima columna pertenece a", "\"suma\" prob = float(labelCounts[key])/numEntries #aqui calcula la probabilidad shannonEnt -= prob * log(prob,2)", "un conjunto con todos los valores únicos para esa caracteristica dada newEntropy =", "= [] #variable donde se retornará la division for featVec in dataSet: #for", "len(dataSet[0]) == 1: #De la misma manera, si no hay más caracteristicas en", "not in classCount.keys(): classCount[vote] = 0 #En caso de que no exista la", "cuya columna (axis) sea igual al valor dado retDataSet = [] #variable donde", "la etiqueta secondDict = inputTree[firstStr] #Aqui obtiene el valor de la primera llave,", "dada por el vector de entrada if isinstance(valueOfFeat, dict): #Comprueba si realmente existe", "surfacing','flippers'] #change to discrete values return dataSet, labels def calcShannonEnt(dataSet): #Esta funcion calcula", "= splitDataSet(dataSet, i, value) #Obtiene el subconjunto de datos con la caracteristica i", "mejor caracteristica para despues poder hacer las siguientes divisiones uniqueVals = set(featValues) #aqui", "\"i\" uniqueVals = set(featList) #Obtiene un conjunto con todos los valores únicos para", "decir, el valor de la etiqueta secondDict = inputTree[firstStr] #Aqui obtiene el valor", "labels[bestFeat] #Obtiene las etiquetas de aquellos con la mejor caracteristica myTree = {bestFeatLabel:{}}", "de cada clase for featVec in dataSet: #Obtiene los elementos únicos con sus", "a la etiqueta de clase baseEntropy = calcShannonEnt(dataSet) #calcula la entropia completa del", "la \"suma\" prob = float(labelCounts[key])/numEntries #aqui calcula la probabilidad shannonEnt -= prob *", "la division for featVec in dataSet: #for que recorre cada uno de las", "el dataset, deja de dividirse return majorityCnt(classList) #Retorna el valor de la clase", "deja de dividirse return majorityCnt(classList) #Retorna el valor de la clase mayoritaria bestFeat", "de diccionarios cuya llave es la de la etiqueta con la mejor caracteristica", "para este subconjunto infoGain = baseEntropy - newEntropy #Calcula la ganancia if (infoGain", "la clase actual en la variable de labelCounts, en caso de que no,", "#Suma la entropia dada para este subconjunto infoGain = baseEntropy - newEntropy #Calcula", "el siguiente subarbol return myTree #retorna el arbol def classify(inputTree,featLabels,testVec): #Funcion que recorre", "obtiene aquella que corresponde con la del nivel actual del arbol key =", "final, retorna el índice de la mejor caracteristica. 
def majorityCnt(classList): #Funcion que retorna", "de la creación del arbol de manera recursiva classList = [example[-1] for example", "1: #De la misma manera, si no hay más caracteristicas en el dataset,", "funcion nucleo del algoritmo numFeatures = len(dataSet[0]) - 1 #Obtiene el numero de", "hay más caracteristicas en el dataset, deja de dividirse return majorityCnt(classList) #Retorna el", "valor de la primer llave, es decir, el valor de la etiqueta secondDict", "la entropia def splitDataSet(dataSet, axis, value): #Funcion que retorna todas las filas del", "#Dentro del vector de caracteristicas, obtiene aquella que corresponde con la del nivel", "una de las etiquetas y lo añade a la \"suma\" prob = float(labelCounts[key])/numEntries", "decir, la clase mayoritaria def createTree(dataSet,labels): #Esta funcion se encarga de la creación", "la funcion nucleo del algoritmo numFeatures = len(dataSet[0]) - 1 #Obtiene el numero", "tambien es una funcion recursiva firstStr = list(inputTree)[0] #como el arbol es un", "valueOfFeat = secondDict[key] #Se adentra al arbol, obteniendo el siguiente arbol que corresponde", "else: classLabel = valueOfFeat #en caso contrario, el valor de la etiqueta de", "la asigna a classList if classList.count(classList[0]) == len(classList): #Comprueba si el numero de", "example in dataSet] #La lista de clases pertenece a la ultima columna del", "funcion calcula la entropia de shannon del dataset numEntries = len(dataSet) #Obtiene el", "dada como parametro reducedFeatVec = featVec[:axis] #crea una fila con todos los datos", "in dataSet] #Obtiene los valores de la mejor caracteristica para despues poder hacer", "baseEntropy - newEntropy #Calcula la ganancia if (infoGain > bestInfoGain): #Compara la ganancia", "conjunto de arboles featIndex = featLabels.index(firstStr) #Dentro del vector de caracteristicas, obtiene aquella", "la caracteristica return classLabel #retorna la etiqueta de clase def storeTree(inputTree,filename): #Funcion que", "range(numFeatures): #for que recorre todas las caracteristicas featList = [example[i] for example in", "funcion recursiva firstStr = list(inputTree)[0] #como el arbol es un diccionario de diccionarios,", "uno al valor de la key sortedClassCount = sorted(classCount.iteritems(), key=operator.itemgetter(1), reverse=True) #Ordena de", "= featVec[:axis] #crea una fila con todos los datos hasta antes del axis", "ultima columna del dataset, por eso la asigna a classList if classList.count(classList[0]) ==", "a la misma variable myTree[bestFeatLabel][value] = createTree(splitDataSet(dataSet, bestFeat, value),subLabels) #asigna por cada uno", "caracteristica para despues poder hacer las siguientes divisiones uniqueVals = set(featValues) #aqui obtiene", "más caracteristicas en el dataset, deja de dividirse return majorityCnt(classList) #Retorna el valor", "+= 1 #aumenta en uno el valor de la clase shannonEnt = 0.0", "de la entropia for value in uniqueVals: #For que recorre todos los valores", "prob = len(subDataSet)/float(len(dataSet)) #Obtiene la probabilidad de este subconjunto newEntropy += prob *", "probabilidad de cada una de las etiquetas y lo añade a la \"suma\"", "currentLabel = featVec[-1] #Establece currentLable al valor del ultimo elemento del vector de", "esa caracteristica dada newEntropy = 0.0 #Inicializa el valor de la entropia for", "que retorna un mequeño dataset dataSet = [[1, 1, 'yes'], [1, 1, 'yes'],", "elemento, es decir, la clase mayoritaria def createTree(dataSet,labels): #Esta funcion se encarga de", "secondDict = 
inputTree[firstStr] #Aqui obtiene el valor de la primera llave, que viene", "caracteristica \"i\" uniqueVals = set(featList) #Obtiene un conjunto con todos los valores únicos", "entropia for value in uniqueVals: #For que recorre todos los valores únicos de", "caracteristicas currentLabel = featVec[-1] #Establece currentLable al valor del ultimo elemento del vector", "cuenta de cada clase for featVec in dataSet: #Obtiene los elementos únicos con", "que retorna el valor de la clase mayoritaria classCount={} #Diccionario donde se guardará", "en 2 featValues = [example[bestFeat] for example in dataSet] #Obtiene los valores de", "featIndex = featLabels.index(firstStr) #Dentro del vector de caracteristicas, obtiene aquella que corresponde con", "key sortedClassCount = sorted(classCount.iteritems(), key=operator.itemgetter(1), reverse=True) #Ordena de mayor a menor return sortedClassCount[0][0]", "aquellos con la mejor caracteristica myTree = {bestFeatLabel:{}} #Crea un diccionario de diccionarios", "shannon del dataset numEntries = len(dataSet) #Obtiene el numero de filas del dataset", "la del arbol del vector de entrada valueOfFeat = secondDict[key] #Se adentra al", "filas del dataset cuya columna (axis) sea igual al valor dado retDataSet =", "axis reducedFeatVec.extend(featVec[axis+1:]) #añade los elementos restantes menos la columna del axis retDataSet.append(reducedFeatVec) #agrega", "el numero de caracteristicas (columnas). Es importante recalcar que la ultima columna pertenece", "#calcula la entropia completa del dataset bestInfoGain = 0.0; bestFeature = -1 #inicializa", "únicos para esa caracteristica dada newEntropy = 0.0 #Inicializa el valor de la", "el resultado de la clasificacion, tambien es una funcion recursiva firstStr = list(inputTree)[0]", "esta como la nueva mejor bestFeature = i #Tambien guarda el valor de", "uniqueVals = set(featList) #Obtiene un conjunto con todos los valores únicos para esa", "de la etiqueta secondDict = inputTree[firstStr] #Aqui obtiene el valor de la primera", "aquella que corresponde con la del nivel actual del arbol key = testVec[featIndex]", "al valor del ultimo elemento del vector de caracteristicas if currentLabel not in", "el índice de la mejor caracteristica return bestFeature #Al final, retorna el índice", "de la etiqueta de clase corresponde al valor de la caracteristica return classLabel", "sortedClassCount = sorted(classCount.iteritems(), key=operator.itemgetter(1), reverse=True) #Ordena de mayor a menor return sortedClassCount[0][0] #retorna", "return classList[0]#En caso de que si, significa que todas las clases son iguales", "etiqueta con la mejor caracteristica del(labels[bestFeat]) #elimina las etiquetas de las mejores caracteristicas", "no hay más caracteristicas en el dataset, deja de dividirse return majorityCnt(classList) #Retorna", "example in dataSet] #Obtiene los valores de la mejor caracteristica para despues poder", "retornará la division for featVec in dataSet: #for que recorre cada uno de", "#Funcion que carga un arbol de un archivo import pickle fr = open(filename,'rb')", "guarda la cuenta de cada clase for featVec in dataSet: #Obtiene los elementos", "en la variable de labelCounts, en caso de que no, añade la key", "de la etiqueta con la mejor caracteristica del(labels[bestFeat]) #elimina las etiquetas de las", "clases return classList[0]#En caso de que si, significa que todas las clases son", "= [example[i] for example in dataSet]#Crea una lista con todas las filas que", "la ganancia con la de mejor ganancia bestInfoGain = 
infoGain #Si es mejor", "clase mayoritaria bestFeat = chooseBestFeatureToSplit(dataSet) #Obtiene la mejor caracteristica para dividir bestFeatLabel =", "adentra al arbol, obteniendo el siguiente arbol que corresponde a la caracteristica dada", "no exista la key, la añade como cero classCount[vote] += 1 #suma uno", "subarboles estarian accediendo a la misma variable myTree[bestFeatLabel][value] = createTree(splitDataSet(dataSet, bestFeat, value),subLabels) #asigna", "= [[1, 1, 'yes'], [1, 1, 'yes'], [1, 0, 'no'], [0, 1, 'no'],", "de clases return classList[0]#En caso de que si, significa que todas las clases", "de datos con la caracteristica i prob = len(subDataSet)/float(len(dataSet)) #Obtiene la probabilidad de", "un mequeño dataset dataSet = [[1, 1, 'yes'], [1, 1, 'yes'], [1, 0,", "índice de la mejor caracteristica. def majorityCnt(classList): #Funcion que retorna el valor de", "dataset dataSet = [[1, 1, 'yes'], [1, 1, 'yes'], [1, 0, 'no'], [0,", "aquellos datos segun el valor de la columna dado def chooseBestFeatureToSplit(dataSet): #Esta es", "[1, 0, 'no'], [0, 1, 'no'], [0, 1, 'no']] labels = ['no surfacing','flippers']", "== value: #comprueba si cumple con la condicion dada como parametro reducedFeatVec =", "obtiene el valor de la primera llave, que viene a ser un conjunto", "este subconjunto infoGain = baseEntropy - newEntropy #Calcula la ganancia if (infoGain >", "clase for vote in classList: #For que recorre la lista de clases if", "featVec in dataSet: #for que recorre cada uno de las filas del dataset", "y la establece en 0 labelCounts[currentLabel] += 1 #aumenta en uno el valor", "= {bestFeatLabel:{}} #Crea un diccionario de diccionarios cuya llave es la de la", "de clase corresponde al valor de la caracteristica return classLabel #retorna la etiqueta", "in dataSet: #for que recorre cada uno de las filas del dataset if", "'no'], [0, 1, 'no']] labels = ['no surfacing','flippers'] #change to discrete values return", "= -1 #inicializa los valores para la mejor ganancia y la mejor caracteristica", "i #Tambien guarda el valor de i, es decir, el índice de la", "labels[:] #copia todas las etiqutas, pues si enviara tal cual el valor de", "labelCounts, en caso de que no, añade la key al diccionario y la", "-= prob * log(prob,2) #aqui calcula la entropia y la añade a la", "de i, es decir, el índice de la mejor caracteristica return bestFeature #Al", "de la variable, asigna esta como la nueva mejor bestFeature = i #Tambien", "un arbol en un archivo import pickle fw = open(filename,'wb') pickle.dump(inputTree,fw) fw.close() def", "clase def storeTree(inputTree,filename): #Funcion que guarda un arbol en un archivo import pickle", "clases son iguales por lo que ahi termina la recursividad y retorna la", "de la key sortedClassCount = sorted(classCount.iteritems(), key=operator.itemgetter(1), reverse=True) #Ordena de mayor a menor", "valor de la etiqueta secondDict = inputTree[firstStr] #Aqui obtiene el valor de la", "de caracteristicas, obtiene aquella que corresponde con la del nivel actual del arbol", "cada uno de las filas del dataset if featVec[axis] == value: #comprueba si", "infoGain = baseEntropy - newEntropy #Calcula la ganancia if (infoGain > bestInfoGain): #Compara", "la mejor caracteristica for i in range(numFeatures): #for que recorre todas las caracteristicas", "prob * log(prob,2) #aqui calcula la entropia y la añade a la suma,", "i in range(numFeatures): #for que recorre todas las caracteristicas featList = [example[i] for", "diccionario de diccionarios, con esto obtiene el valor 
de la primer llave, es", "'yes'], [1, 0, 'no'], [0, 1, 'no'], [0, 1, 'no']] labels = ['no", "de shannon del dataset numEntries = len(dataSet) #Obtiene el numero de filas del", "if currentLabel not in labelCounts.keys(): labelCounts[currentLabel] = 0 #Comprueba si ya se encuentra", "pues si enviara tal cual el valor de labels, entre los subarboles estarian", "del axis retDataSet.append(reducedFeatVec) #agrega la fila a la matriz de retorno return retDataSet", "que recorre cada uno de las filas del dataset if featVec[axis] == value:", "segun el valor de la columna dado def chooseBestFeatureToSplit(dataSet): #Esta es la funcion", "classLabel = classify(valueOfFeat, featLabels, testVec) #En caso de que si, se llama a", "#Obtiene las etiquetas de aquellos con la mejor caracteristica myTree = {bestFeatLabel:{}} #Crea", "#Diccionario donde se guarda la cuenta de cada clase for featVec in dataSet:", "de caracteristicas if currentLabel not in labelCounts.keys(): labelCounts[currentLabel] = 0 #Comprueba si ya", "las filas del dataset if featVec[axis] == value: #comprueba si cumple con la", "entrada valueOfFeat = secondDict[key] #Se adentra al arbol, obteniendo el siguiente arbol que", "isinstance(valueOfFeat, dict): #Comprueba si realmente existe ese subarbol classLabel = classify(valueOfFeat, featLabels, testVec)", "donde se retornará la division for featVec in dataSet: #for que recorre cada", "classCount.keys(): classCount[vote] = 0 #En caso de que no exista la key, la", "la mejor caracteristica myTree = {bestFeatLabel:{}} #Crea un diccionario de diccionarios cuya llave", "todas las clases son iguales por lo que ahi termina la recursividad y", "manera, si no hay más caracteristicas en el dataset, deja de dividirse return", "la mejor caracteristica. def majorityCnt(classList): #Funcion que retorna el valor de la clase", "la mejor etiqueta el siguiente subarbol return myTree #retorna el arbol def classify(inputTree,featLabels,testVec):", "como cero classCount[vote] += 1 #suma uno al valor de la key sortedClassCount", "for calcula la probabilidad de cada una de las etiquetas y lo añade", "mayoritaria def createTree(dataSet,labels): #Esta funcion se encarga de la creación del arbol de", "#Compara la ganancia con la de mejor ganancia bestInfoGain = infoGain #Si es", "storeTree(inputTree,filename): #Funcion que guarda un arbol en un archivo import pickle fw =", "set(featList) #Obtiene un conjunto con todos los valores únicos para esa caracteristica dada", "#Funcion que retorna el valor de la clase mayoritaria classCount={} #Diccionario donde se", "#Obtiene los valores de la mejor caracteristica para despues poder hacer las siguientes", "caracteristica subLabels = labels[:] #copia todas las etiqutas, pues si enviara tal cual", "1 #suma uno al valor de la key sortedClassCount = sorted(classCount.iteritems(), key=operator.itemgetter(1), reverse=True)", "el valor de la clase shannonEnt = 0.0 #Inicializa la entropia en ceros", "entropia de shannon del dataset numEntries = len(dataSet) #Obtiene el numero de filas", "featVec in dataSet: #Obtiene los elementos únicos con sus caracteristicas currentLabel = featVec[-1]", "#Aqui obtiene el valor de la primera llave, que viene a ser un", "classList: #For que recorre la lista de clases if vote not in classCount.keys():", "def chooseBestFeatureToSplit(dataSet): #Esta es la funcion nucleo del algoritmo numFeatures = len(dataSet[0]) -", "el valor de la columna dado def chooseBestFeatureToSplit(dataSet): #Esta es la funcion nucleo", "la ultima columna pertenece a la 
etiqueta de clase baseEntropy = calcShannonEnt(dataSet) #calcula", "un diccionario de diccionarios, con esto obtiene el valor de la primer llave,", "#Se adentra al arbol, obteniendo el siguiente arbol que corresponde a la caracteristica", "return bestFeature #Al final, retorna el índice de la mejor caracteristica. def majorityCnt(classList):", "if isinstance(valueOfFeat, dict): #Comprueba si realmente existe ese subarbol classLabel = classify(valueOfFeat, featLabels,", "vote in classList: #For que recorre la lista de clases if vote not", "uniqueVals: #For que recorre todos los valores únicos de la caracteristica \"i\" subDataSet", "todas las filas que tienen la caracteristica \"i\" uniqueVals = set(featList) #Obtiene un", "dataset labelCounts = {} #Diccionario donde se guarda la cuenta de cada clase", "una fila con todos los datos hasta antes del axis reducedFeatVec.extend(featVec[axis+1:]) #añade los", "= len(subDataSet)/float(len(dataSet)) #Obtiene la probabilidad de este subconjunto newEntropy += prob * calcShannonEnt(subDataSet)", "resumen, dividió las etiquetas en 2 featValues = [example[bestFeat] for example in dataSet]", "de caracteristicas (columnas). Es importante recalcar que la ultima columna pertenece a la", "- 1 #Obtiene el numero de caracteristicas (columnas). Es importante recalcar que la", "#comprueba si cumple con la condicion dada como parametro reducedFeatVec = featVec[:axis] #crea", "la caracteristica que corresponde con la del arbol del vector de entrada valueOfFeat", "etiqueta de clase def storeTree(inputTree,filename): #Funcion que guarda un arbol en un archivo", "entropia def splitDataSet(dataSet, axis, value): #Funcion que retorna todas las filas del dataset", "esto obtiene el valor de la primer llave, es decir, el valor de", "ganancia if (infoGain > bestInfoGain): #Compara la ganancia con la de mejor ganancia", "a la \"suma\" prob = float(labelCounts[key])/numEntries #aqui calcula la probabilidad shannonEnt -= prob", "unicos de la mejor caracteristica subLabels = labels[:] #copia todas las etiqutas, pues", "y lo añade a la \"suma\" prob = float(labelCounts[key])/numEntries #aqui calcula la probabilidad", "retDataSet.append(reducedFeatVec) #agrega la fila a la matriz de retorno return retDataSet #retorna todos", "#en caso contrario, el valor de la etiqueta de clase corresponde al valor", "todos aquellos datos segun el valor de la columna dado def chooseBestFeatureToSplit(dataSet): #Esta", "si enviara tal cual el valor de labels, entre los subarboles estarian accediendo", "ultimo elemento del vector de caracteristicas if currentLabel not in labelCounts.keys(): labelCounts[currentLabel] =", "esta caractetistica for value in uniqueVals: #for que recorre los valores unicos de", "vector de entrada if isinstance(valueOfFeat, dict): #Comprueba si realmente existe ese subarbol classLabel", "createTree(splitDataSet(dataSet, bestFeat, value),subLabels) #asigna por cada uno de los valores únicos de la", "key = testVec[featIndex] #Obtiene el valor de la caracteristica que corresponde con la", "mejor caracteristica return bestFeature #Al final, retorna el índice de la mejor caracteristica.", "índice de la mejor caracteristica return bestFeature #Al final, retorna el índice de", "caso de que si, se llama a si mismo con los nuevos valores", "valueOfFeat #en caso contrario, el valor de la etiqueta de clase corresponde al", "shannonEnt = 0.0 #Inicializa la entropia en ceros for key in labelCounts: #En", "la añade a la suma, utiliza logaritmo base 2 return shannonEnt #retorna la", 
"= calcShannonEnt(dataSet) #calcula la entropia completa del dataset bestInfoGain = 0.0; bestFeature =", "elemento del vector de caracteristicas if currentLabel not in labelCounts.keys(): labelCounts[currentLabel] = 0", "#Funcion que guarda un arbol en un archivo import pickle fw = open(filename,'wb')", "todas las caracteristicas featList = [example[i] for example in dataSet]#Crea una lista con", "valor de la clase shannonEnt = 0.0 #Inicializa la entropia en ceros for", "valor dado retDataSet = [] #variable donde se retornará la division for featVec", "la entropia de shannon del dataset numEntries = len(dataSet) #Obtiene el numero de", "recorre la lista de clases if vote not in classCount.keys(): classCount[vote] = 0", "#for que recorre los valores unicos de la mejor caracteristica subLabels = labels[:]", "arbol (recursividad) else: classLabel = valueOfFeat #en caso contrario, el valor de la", "de la caracteristica \"i\" subDataSet = splitDataSet(dataSet, i, value) #Obtiene el subconjunto de", "testVec[featIndex] #Obtiene el valor de la caracteristica que corresponde con la del arbol", "igual al valor dado retDataSet = [] #variable donde se retornará la division", "== 1: #De la misma manera, si no hay más caracteristicas en el", "la misma variable myTree[bestFeatLabel][value] = createTree(splitDataSet(dataSet, bestFeat, value),subLabels) #asigna por cada uno de", "que viene a ser un conjunto de arboles featIndex = featLabels.index(firstStr) #Dentro del", "classLabel #retorna la etiqueta de clase def storeTree(inputTree,filename): #Funcion que guarda un arbol", "arboles featIndex = featLabels.index(firstStr) #Dentro del vector de caracteristicas, obtiene aquella que corresponde", "añade a la suma, utiliza logaritmo base 2 return shannonEnt #retorna la entropia", "lista de clases return classList[0]#En caso de que si, significa que todas las", "añade como cero classCount[vote] += 1 #suma uno al valor de la key", "#Obtiene la mejor caracteristica para dividir bestFeatLabel = labels[bestFeat] #Obtiene las etiquetas de", "for vote in classList: #For que recorre la lista de clases if vote", "la añade como cero classCount[vote] += 1 #suma uno al valor de la", "los valores de la mejor caracteristica para despues poder hacer las siguientes divisiones", "caractetistica for value in uniqueVals: #for que recorre los valores unicos de la", "bestFeature = -1 #inicializa los valores para la mejor ganancia y la mejor", "= featVec[-1] #Establece currentLable al valor del ultimo elemento del vector de caracteristicas", "valores únicos de la caracteristica \"i\" subDataSet = splitDataSet(dataSet, i, value) #Obtiene el", "dataset numEntries = len(dataSet) #Obtiene el numero de filas del dataset labelCounts =", "misma variable myTree[bestFeatLabel][value] = createTree(splitDataSet(dataSet, bestFeat, value),subLabels) #asigna por cada uno de los", "shannonEnt -= prob * log(prob,2) #aqui calcula la entropia y la añade a", "el numero de filas del dataset labelCounts = {} #Diccionario donde se guarda", "subarbol classLabel = classify(valueOfFeat, featLabels, testVec) #En caso de que si, se llama", "vector de caracteristicas if currentLabel not in labelCounts.keys(): labelCounts[currentLabel] = 0 #Comprueba si", "de que si, significa que todas las clases son iguales por lo que", "for example in dataSet] #Obtiene los valores de la mejor caracteristica para despues", "de esta caractetistica for value in uniqueVals: #for que recorre los valores unicos", "#variable donde se retornará la division for featVec in dataSet: 
#for que recorre", "#Diccionario donde se guardará la cuenta de cada clase for vote in classList:", "numEntries = len(dataSet) #Obtiene el numero de filas del dataset labelCounts = {}", "discrete values return dataSet, labels def calcShannonEnt(dataSet): #Esta funcion calcula la entropia de", "la etiqueta de clase def storeTree(inputTree,filename): #Funcion que guarda un arbol en un", "establece en 0 labelCounts[currentLabel] += 1 #aumenta en uno el valor de la", "- newEntropy #Calcula la ganancia if (infoGain > bestInfoGain): #Compara la ganancia con", "math import log import operator def createDataSet(): #Funcion que retorna un mequeño dataset", "= valueOfFeat #en caso contrario, el valor de la etiqueta de clase corresponde", "las etiquetas de las mejores caracteristicas del vector de etiquetas, en resumen, dividió", "#Obtiene un conjunto con todos los valores únicos para esa caracteristica dada newEntropy", "createTree(dataSet,labels): #Esta funcion se encarga de la creación del arbol de manera recursiva", "valores únicos para esa caracteristica dada newEntropy = 0.0 #Inicializa el valor de", "newEntropy #Calcula la ganancia if (infoGain > bestInfoGain): #Compara la ganancia con la", "open(filename,'wb') pickle.dump(inputTree,fw) fw.close() def grabTree(filename): #Funcion que carga un arbol de un archivo", "del algoritmo numFeatures = len(dataSet[0]) - 1 #Obtiene el numero de caracteristicas (columnas).", "todas las filas del dataset cuya columna (axis) sea igual al valor dado", "len(subDataSet)/float(len(dataSet)) #Obtiene la probabilidad de este subconjunto newEntropy += prob * calcShannonEnt(subDataSet) #Suma", "def classify(inputTree,featLabels,testVec): #Funcion que recorre un arbol para dar el resultado de la", "filas del dataset if featVec[axis] == value: #comprueba si cumple con la condicion", "= 0 #Comprueba si ya se encuentra la clase actual en la variable", "in labelCounts: #En este for calcula la probabilidad de cada una de las", "dada newEntropy = 0.0 #Inicializa el valor de la entropia for value in", "cuya llave es la de la etiqueta con la mejor caracteristica del(labels[bestFeat]) #elimina", "arbol en un archivo import pickle fw = open(filename,'wb') pickle.dump(inputTree,fw) fw.close() def grabTree(filename):", "bestInfoGain = infoGain #Si es mejor que la de la variable, asigna esta", "#Calcula la ganancia if (infoGain > bestInfoGain): #Compara la ganancia con la de", "caracteristica i prob = len(subDataSet)/float(len(dataSet)) #Obtiene la probabilidad de este subconjunto newEntropy +=", "que todas las clases son iguales por lo que ahi termina la recursividad", "de manera recursiva classList = [example[-1] for example in dataSet] #La lista de", "valor del ultimo elemento del vector de caracteristicas if currentLabel not in labelCounts.keys():", "mejor que la de la variable, asigna esta como la nueva mejor bestFeature", "= infoGain #Si es mejor que la de la variable, asigna esta como", "uniqueVals = set(featValues) #aqui obtiene los valores únicos de esta caractetistica for value", "es decir, la clase mayoritaria def createTree(dataSet,labels): #Esta funcion se encarga de la", "los valores para la mejor ganancia y la mejor caracteristica for i in", "= len(dataSet[0]) - 1 #Obtiene el numero de caracteristicas (columnas). 
Es importante recalcar", "lista de clases if vote not in classCount.keys(): classCount[vote] = 0 #En caso", "obtiene los valores únicos de esta caractetistica for value in uniqueVals: #for que", "de mejor ganancia bestInfoGain = infoGain #Si es mejor que la de la", "la etiqueta de clase baseEntropy = calcShannonEnt(dataSet) #calcula la entropia completa del dataset", "entre los subarboles estarian accediendo a la misma variable myTree[bestFeatLabel][value] = createTree(splitDataSet(dataSet, bestFeat,", "de arboles featIndex = featLabels.index(firstStr) #Dentro del vector de caracteristicas, obtiene aquella que", "operator def createDataSet(): #Funcion que retorna un mequeño dataset dataSet = [[1, 1,", "la caracteristica i prob = len(subDataSet)/float(len(dataSet)) #Obtiene la probabilidad de este subconjunto newEntropy", "clases es equivalente al tamaño de la lista de clases return classList[0]#En caso", "con la condicion dada como parametro reducedFeatVec = featVec[:axis] #crea una fila con", "entrada if isinstance(valueOfFeat, dict): #Comprueba si realmente existe ese subarbol classLabel = classify(valueOfFeat,", "caracteristicas if currentLabel not in labelCounts.keys(): labelCounts[currentLabel] = 0 #Comprueba si ya se", "labels = ['no surfacing','flippers'] #change to discrete values return dataSet, labels def calcShannonEnt(dataSet):", "diccionario y la establece en 0 labelCounts[currentLabel] += 1 #aumenta en uno el", "in range(numFeatures): #for que recorre todas las caracteristicas featList = [example[i] for example", "+= prob * calcShannonEnt(subDataSet) #Suma la entropia dada para este subconjunto infoGain =", "llave es la de la etiqueta con la mejor caracteristica del(labels[bestFeat]) #elimina las", "= open(filename,'wb') pickle.dump(inputTree,fw) fw.close() def grabTree(filename): #Funcion que carga un arbol de un", "las filas que tienen la caracteristica \"i\" uniqueVals = set(featList) #Obtiene un conjunto", "cero classCount[vote] += 1 #suma uno al valor de la key sortedClassCount =", "valor de la caracteristica return classLabel #retorna la etiqueta de clase def storeTree(inputTree,filename):", "#Obtiene el numero de caracteristicas (columnas). Es importante recalcar que la ultima columna", "si mismo con los nuevos valores del arbol (recursividad) else: classLabel = valueOfFeat", "donde se guarda la cuenta de cada clase for featVec in dataSet: #Obtiene", "classify(inputTree,featLabels,testVec): #Funcion que recorre un arbol para dar el resultado de la clasificacion,", "fila con todos los datos hasta antes del axis reducedFeatVec.extend(featVec[axis+1:]) #añade los elementos", "variable de labelCounts, en caso de que no, añade la key al diccionario", "#aqui calcula la entropia y la añade a la suma, utiliza logaritmo base", "caracteristica. 
def majorityCnt(classList): #Funcion que retorna el valor de la clase mayoritaria classCount={}", "la columna del axis retDataSet.append(reducedFeatVec) #agrega la fila a la matriz de retorno", "dataset, por eso la asigna a classList if classList.count(classList[0]) == len(classList): #Comprueba si", "#copia todas las etiqutas, pues si enviara tal cual el valor de labels,", "obtiene el valor de la primer llave, es decir, el valor de la", "[0, 1, 'no']] labels = ['no surfacing','flippers'] #change to discrete values return dataSet,", "la de mejor ganancia bestInfoGain = infoGain #Si es mejor que la de", "reverse=True) #Ordena de mayor a menor return sortedClassCount[0][0] #retorna el primer elemento, es", "#for que recorre cada uno de las filas del dataset if featVec[axis] ==", "de las filas del dataset if featVec[axis] == value: #comprueba si cumple con", "la key sortedClassCount = sorted(classCount.iteritems(), key=operator.itemgetter(1), reverse=True) #Ordena de mayor a menor return", "probabilidad shannonEnt -= prob * log(prob,2) #aqui calcula la entropia y la añade", "de etiquetas, en resumen, dividió las etiquetas en 2 featValues = [example[bestFeat] for", "axis retDataSet.append(reducedFeatVec) #agrega la fila a la matriz de retorno return retDataSet #retorna", "de clases if vote not in classCount.keys(): classCount[vote] = 0 #En caso de", "[example[i] for example in dataSet]#Crea una lista con todas las filas que tienen", "def splitDataSet(dataSet, axis, value): #Funcion que retorna todas las filas del dataset cuya", "for featVec in dataSet: #for que recorre cada uno de las filas del", "def majorityCnt(classList): #Funcion que retorna el valor de la clase mayoritaria classCount={} #Diccionario", "si, significa que todas las clases son iguales por lo que ahi termina", "el índice de la mejor caracteristica. 
def majorityCnt(classList): #Funcion que retorna el valor", "valor de la clase mayoritaria bestFeat = chooseBestFeatureToSplit(dataSet) #Obtiene la mejor caracteristica para", "subarbol return myTree #retorna el arbol def classify(inputTree,featLabels,testVec): #Funcion que recorre un arbol", "if vote not in classCount.keys(): classCount[vote] = 0 #En caso de que no", "ser un conjunto de arboles featIndex = featLabels.index(firstStr) #Dentro del vector de caracteristicas,", "del vector de entrada valueOfFeat = secondDict[key] #Se adentra al arbol, obteniendo el", "Es importante recalcar que la ultima columna pertenece a la etiqueta de clase", "mejor caracteristica subLabels = labels[:] #copia todas las etiqutas, pues si enviara tal", "currentLable al valor del ultimo elemento del vector de caracteristicas if currentLabel not", "[example[bestFeat] for example in dataSet] #Obtiene los valores de la mejor caracteristica para", "como la nueva mejor bestFeature = i #Tambien guarda el valor de i,", "los subarboles estarian accediendo a la misma variable myTree[bestFeatLabel][value] = createTree(splitDataSet(dataSet, bestFeat, value),subLabels)", "de que si, se llama a si mismo con los nuevos valores del", "al valor dado retDataSet = [] #variable donde se retornará la division for", "#retorna la entropia def splitDataSet(dataSet, axis, value): #Funcion que retorna todas las filas", "no, añade la key al diccionario y la establece en 0 labelCounts[currentLabel] +=", "recalcar que la ultima columna pertenece a la etiqueta de clase baseEntropy =", "0.0 #Inicializa el valor de la entropia for value in uniqueVals: #For que", "(axis) sea igual al valor dado retDataSet = [] #variable donde se retornará", "creación del arbol de manera recursiva classList = [example[-1] for example in dataSet]", "to discrete values return dataSet, labels def calcShannonEnt(dataSet): #Esta funcion calcula la entropia", "caracteristica dada por el vector de entrada if isinstance(valueOfFeat, dict): #Comprueba si realmente", "de la caracteristica return classLabel #retorna la etiqueta de clase def storeTree(inputTree,filename): #Funcion", "y la añade a la suma, utiliza logaritmo base 2 return shannonEnt #retorna", "menos la columna del axis retDataSet.append(reducedFeatVec) #agrega la fila a la matriz de", "de que no exista la key, la añade como cero classCount[vote] += 1", "valor de labels, entre los subarboles estarian accediendo a la misma variable myTree[bestFeatLabel][value]", "retDataSet = [] #variable donde se retornará la division for featVec in dataSet:", "entropia completa del dataset bestInfoGain = 0.0; bestFeature = -1 #inicializa los valores", "caso contrario, el valor de la etiqueta de clase corresponde al valor de", "datos segun el valor de la columna dado def chooseBestFeatureToSplit(dataSet): #Esta es la", "corresponde a la caracteristica dada por el vector de entrada if isinstance(valueOfFeat, dict):", "fw = open(filename,'wb') pickle.dump(inputTree,fw) fw.close() def grabTree(filename): #Funcion que carga un arbol de", "del dataset labelCounts = {} #Diccionario donde se guarda la cuenta de cada", "featLabels.index(firstStr) #Dentro del vector de caracteristicas, obtiene aquella que corresponde con la del", "todos los valores únicos para esa caracteristica dada newEntropy = 0.0 #Inicializa el", "etiqueta de clase baseEntropy = calcShannonEnt(dataSet) #calcula la entropia completa del dataset bestInfoGain", "la entropia dada para este subconjunto infoGain = baseEntropy - newEntropy #Calcula la", "in 
uniqueVals: #for que recorre los valores unicos de la mejor caracteristica subLabels", "que no exista la key, la añade como cero classCount[vote] += 1 #suma", "con la mejor caracteristica del(labels[bestFeat]) #elimina las etiquetas de las mejores caracteristicas del", "columna del dataset, por eso la asigna a classList if classList.count(classList[0]) == len(classList):", "subLabels = labels[:] #copia todas las etiqutas, pues si enviara tal cual el", "del nivel actual del arbol key = testVec[featIndex] #Obtiene el valor de la", "diccionarios, con esto obtiene el valor de la primer llave, es decir, el", "grabTree(filename): #Funcion que carga un arbol de un archivo import pickle fr =", "los datos hasta antes del axis reducedFeatVec.extend(featVec[axis+1:]) #añade los elementos restantes menos la", "newEntropy += prob * calcShannonEnt(subDataSet) #Suma la entropia dada para este subconjunto infoGain", "la variable de labelCounts, en caso de que no, añade la key al", "def calcShannonEnt(dataSet): #Esta funcion calcula la entropia de shannon del dataset numEntries =", "clases if vote not in classCount.keys(): classCount[vote] = 0 #En caso de que", "al valor de la key sortedClassCount = sorted(classCount.iteritems(), key=operator.itemgetter(1), reverse=True) #Ordena de mayor", "si el numero de clases es equivalente al tamaño de la lista de", "#Obtiene la probabilidad de este subconjunto newEntropy += prob * calcShannonEnt(subDataSet) #Suma la", "la clase mayoritaria bestFeat = chooseBestFeatureToSplit(dataSet) #Obtiene la mejor caracteristica para dividir bestFeatLabel", "que la ultima columna pertenece a la etiqueta de clase baseEntropy = calcShannonEnt(dataSet)", "dataset cuya columna (axis) sea igual al valor dado retDataSet = [] #variable", "que ahi termina la recursividad y retorna la clase if len(dataSet[0]) == 1:", "retorno return retDataSet #retorna todos aquellos datos segun el valor de la columna", "se encarga de la creación del arbol de manera recursiva classList = [example[-1]", "la misma manera, si no hay más caracteristicas en el dataset, deja de", "caracteristica myTree = {bestFeatLabel:{}} #Crea un diccionario de diccionarios cuya llave es la", "poder hacer las siguientes divisiones uniqueVals = set(featValues) #aqui obtiene los valores únicos", "a classList if classList.count(classList[0]) == len(classList): #Comprueba si el numero de clases es", "el valor de la entropia for value in uniqueVals: #For que recorre todos", "la cuenta de cada clase for featVec in dataSet: #Obtiene los elementos únicos", "significa que todas las clases son iguales por lo que ahi termina la", "#Comprueba si ya se encuentra la clase actual en la variable de labelCounts,", "dataSet] #La lista de clases pertenece a la ultima columna del dataset, por", "los valores únicos de la mejor etiqueta el siguiente subarbol return myTree #retorna", "recorre todas las caracteristicas featList = [example[i] for example in dataSet]#Crea una lista", "el valor de i, es decir, el índice de la mejor caracteristica return", "= len(dataSet) #Obtiene el numero de filas del dataset labelCounts = {} #Diccionario", "myTree = {bestFeatLabel:{}} #Crea un diccionario de diccionarios cuya llave es la de", "dataSet, labels def calcShannonEnt(dataSet): #Esta funcion calcula la entropia de shannon del dataset", "todas las etiqutas, pues si enviara tal cual el valor de labels, entre", "pertenece a la ultima columna del dataset, por eso la asigna a classList", "que carga un arbol de un archivo import pickle fr = open(filename,'rb') 
return", "classList if classList.count(classList[0]) == len(classList): #Comprueba si el numero de clases es equivalente", "#change to discrete values return dataSet, labels def calcShannonEnt(dataSet): #Esta funcion calcula la", "pertenece a la etiqueta de clase baseEntropy = calcShannonEnt(dataSet) #calcula la entropia completa", "la de la etiqueta con la mejor caracteristica del(labels[bestFeat]) #elimina las etiquetas de", "lo añade a la \"suma\" prob = float(labelCounts[key])/numEntries #aqui calcula la probabilidad shannonEnt", "retDataSet #retorna todos aquellos datos segun el valor de la columna dado def", "las etiquetas en 2 featValues = [example[bestFeat] for example in dataSet] #Obtiene los", "corresponde con la del arbol del vector de entrada valueOfFeat = secondDict[key] #Se", "a la caracteristica dada por el vector de entrada if isinstance(valueOfFeat, dict): #Comprueba", "arbol para dar el resultado de la clasificacion, tambien es una funcion recursiva", "es mejor que la de la variable, asigna esta como la nueva mejor", "arbol es un diccionario de diccionarios, con esto obtiene el valor de la", "de las mejores caracteristicas del vector de etiquetas, en resumen, dividió las etiquetas", "algoritmo numFeatures = len(dataSet[0]) - 1 #Obtiene el numero de caracteristicas (columnas). Es", "la entropia en ceros for key in labelCounts: #En este for calcula la", "labels, entre los subarboles estarian accediendo a la misma variable myTree[bestFeatLabel][value] = createTree(splitDataSet(dataSet,", "sus caracteristicas currentLabel = featVec[-1] #Establece currentLable al valor del ultimo elemento del", "featList = [example[i] for example in dataSet]#Crea una lista con todas las filas", "el valor de la etiqueta de clase corresponde al valor de la caracteristica", "valor de la etiqueta de clase corresponde al valor de la caracteristica return", "conjunto con todos los valores únicos para esa caracteristica dada newEntropy = 0.0", "#Inicializa el valor de la entropia for value in uniqueVals: #For que recorre", "de aquellos con la mejor caracteristica myTree = {bestFeatLabel:{}} #Crea un diccionario de", "funcion se encarga de la creación del arbol de manera recursiva classList =", "que si, significa que todas las clases son iguales por lo que ahi", "#Esta es la funcion nucleo del algoritmo numFeatures = len(dataSet[0]) - 1 #Obtiene", "dividirse return majorityCnt(classList) #Retorna el valor de la clase mayoritaria bestFeat = chooseBestFeatureToSplit(dataSet)", "+= 1 #suma uno al valor de la key sortedClassCount = sorted(classCount.iteritems(), key=operator.itemgetter(1),", "por el vector de entrada if isinstance(valueOfFeat, dict): #Comprueba si realmente existe ese", "la ganancia if (infoGain > bestInfoGain): #Compara la ganancia con la de mejor", "la cuenta de cada clase for vote in classList: #For que recorre la", "#Funcion que retorna un mequeño dataset dataSet = [[1, 1, 'yes'], [1, 1,", "#suma uno al valor de la key sortedClassCount = sorted(classCount.iteritems(), key=operator.itemgetter(1), reverse=True) #Ordena", "de la lista de clases return classList[0]#En caso de que si, significa que", "con los nuevos valores del arbol (recursividad) else: classLabel = valueOfFeat #en caso", "featVec[-1] #Establece currentLable al valor del ultimo elemento del vector de caracteristicas if", "de labels, entre los subarboles estarian accediendo a la misma variable myTree[bestFeatLabel][value] =", "#Obtiene el valor de la caracteristica que corresponde con la del arbol del", "arbol del vector 
# --- Decision tree (ID3) utilities ---

from math import log
import operator


def createDataSet():
    # Returns a small example dataset.
    dataSet = [[1, 1, 'yes'],
               [1, 1, 'yes'],
               [1, 0, 'no'],
               [0, 1, 'no'],
               [0, 1, 'no']]
    labels = ['no surfacing', 'flippers']  # change to discrete values
    return dataSet, labels


def calcShannonEnt(dataSet):
    # Computes the Shannon entropy of the dataset.
    numEntries = len(dataSet)  # number of rows in the dataset
    labelCounts = {}  # dictionary holding the count of each class
    for featVec in dataSet:  # collect the unique classes and their counts
        currentLabel = featVec[-1]  # the class label is the last element of each feature vector
        if currentLabel not in labelCounts.keys():
            labelCounts[currentLabel] = 0  # if the class is not yet in labelCounts, add the key initialized to 0
        labelCounts[currentLabel] += 1  # increment the count for this class
    shannonEnt = 0.0  # initialize the entropy to zero
    for key in labelCounts:  # accumulate the contribution of each label
        prob = float(labelCounts[key]) / numEntries  # probability of this label
        shannonEnt -= prob * log(prob, 2)  # entropy contribution, log base 2
    return shannonEnt  # return the entropy


def splitDataSet(dataSet, axis, value):
    # Returns every row of the dataset whose column `axis` equals `value`.
    retDataSet = []  # variable holding the split to be returned
    for featVec in dataSet:  # iterate over every row of the dataset
        if featVec[axis] == value:  # check whether the row meets the given condition
            reducedFeatVec = featVec[:axis]  # build a row with everything before `axis`
            reducedFeatVec.extend(featVec[axis + 1:])  # append the remaining elements, skipping the `axis` column
            retDataSet.append(reducedFeatVec)  # add the row to the returned matrix
    return retDataSet  # return every row matching the given value of the column


def chooseBestFeatureToSplit(dataSet):
    # This is the core function of the algorithm.
    numFeatures = len(dataSet[0]) - 1  # number of features (columns); the last column holds the class label
    baseEntropy = calcShannonEnt(dataSet)  # entropy of the full dataset
    bestInfoGain = 0.0; bestFeature = -1  # initialize the best gain and the best feature
    for i in range(numFeatures):  # iterate over every feature
        featList = [example[i] for example in dataSet]  # list with feature i of every row
        uniqueVals = set(featList)  # set of all unique values of this feature
        newEntropy = 0.0  # initialize the entropy value
        for value in uniqueVals:  # iterate over the unique values of feature i
            subDataSet = splitDataSet(dataSet, i, value)  # subset of rows whose feature i equals value
            prob = len(subDataSet) / float(len(dataSet))  # probability of this subset
            newEntropy += prob * calcShannonEnt(subDataSet)  # accumulate the weighted entropy of the subset
        infoGain = baseEntropy - newEntropy  # compute the information gain
        if (infoGain > bestInfoGain):  # compare the gain with the best gain so far
            bestInfoGain = infoGain  # if it is better, take it as the new best
            bestFeature = i  # and keep i, the index of the best feature
    return bestFeature  # finally, return the index of the best feature


def majorityCnt(classList):
    # Returns the majority class value.
    classCount = {}  # dictionary holding the count of each class
    for vote in classList:  # iterate over the class list
        if vote not in classCount.keys():
            classCount[vote] = 0  # if the key does not exist yet, add it as zero
        classCount[vote] += 1  # add one to the count for this key
    sortedClassCount = sorted(classCount.items(), key=operator.itemgetter(1), reverse=True)  # sort from high to low
    return sortedClassCount[0][0]  # return the first element, i.e. the majority class


def createTree(dataSet, labels):
    # Builds the tree recursively.
    classList = [example[-1] for example in dataSet]  # the class list is the last column of the dataset
    if classList.count(classList[0]) == len(classList):  # check whether the count of one class equals the list length
        return classList[0]  # if so, every class is identical, so the recursion ends here and the class is returned
    if len(dataSet[0]) == 1:  # likewise, if no features remain in the dataset, stop splitting
        return majorityCnt(classList)  # return the majority class value
    bestFeat = chooseBestFeatureToSplit(dataSet)  # best feature to split on
    bestFeatLabel = labels[bestFeat]  # label of the best feature
    myTree = {bestFeatLabel: {}}  # dictionary of dictionaries whose key is the label of the best feature
    del(labels[bestFeat])  # remove the best feature's label from the label vector; in short, the labels are split in two
    featValues = [example[bestFeat] for example in dataSet]  # values of the best feature, used for the next splits
    uniqueVals = set(featValues)  # unique values of this feature
    for value in uniqueVals:  # iterate over the unique values of the best feature
        subLabels = labels[:]  # copy the labels; passing `labels` itself would make the subtrees share the same variable
        myTree[bestFeatLabel][value] = createTree(splitDataSet(dataSet, bestFeat, value), subLabels)  # assign a subtree to each unique value of the best feature
    return myTree  # return the tree


def classify(inputTree, featLabels, testVec):
    # Walks a tree to produce the classification result; also a recursive function.
    firstStr = list(inputTree)[0]  # the tree is a dictionary of dictionaries, so this is the first key, i.e. the label value
    secondDict = inputTree[firstStr]  # value of the first key, which is a set of subtrees
    featIndex = featLabels.index(firstStr)  # index, within the feature vector, of the feature at the current tree level
    key = testVec[featIndex]  # value of the input vector's feature that matches the tree's current feature
    valueOfFeat = secondDict[key]  # descend into the tree, taking the subtree for the feature value given by the input vector
    if isinstance(valueOfFeat, dict):  # check whether a subtree really exists there
        classLabel = classify(valueOfFeat, featLabels, testVec)  # if so, recurse with the new subtree values
    else:
        classLabel = valueOfFeat  # otherwise the class label is the value of the feature itself
    return classLabel  # return the class label


def storeTree(inputTree, filename):
    # Saves a tree to a file.
    import pickle
    fw = open(filename, 'wb')
    pickle.dump(inputTree, fw)
    fw.close()


def grabTree(filename):
    # Loads a tree from a file.
    import pickle
    fr = open(filename, 'rb')
    return pickle.load(fr)
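The functions above are enough to build and query an ID3-style tree end to end. The short sketch below is not part of the original file; it is a minimal usage example, assuming it runs in the same module as the definitions above (the pickle file name is illustrative).

myDat, labels = createDataSet()
print(calcShannonEnt(myDat))                      # ~0.971 for the 2 'yes' / 3 'no' split
tree = createTree(myDat, labels[:])               # pass a copy: createTree mutates the label list
print(tree)                                       # {'no surfacing': {0: 'no', 1: {'flippers': {0: 'no', 1: 'yes'}}}}
print(classify(tree, ['no surfacing', 'flippers'], [1, 0]))   # -> 'no'
storeTree(tree, 'classifierStorage.pkl')          # persist the tree
print(grabTree('classifierStorage.pkl'))          # load it back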
# --- Fitbit webservice configuration ---

__author__ = '<NAME>'
"""
Author: <NAME>
Email: <EMAIL>
Date: February 2, 2018
This is a simple python file that holds all the configuration parameters
required for fitbit webservice wrapper.
"""


class Fitbit_Webservice_Config(object):
    def __init__(self):
        super(Fitbit_Webservice_Config, self).__init__()
        self.get_fitbit_endpoints = self._getFitbitEndpoints

    def _getFitbitEndpoints(self):
        """
        All the valid endpoints supported by Fitbit API's.
        :return: A dictionary with valid hash map for fitbit API's which will be
                 utilized by the wrapper.
        """
        return {
            'todays_steps_realtime': '/1/user/-/activities/steps/date/today/1d.json',
            'last_7_days_steps': '/1/user/-/activities/steps/date/today/7d.json',
            'todays_calories_realtime': '/1/user/-/activities/calories/date/today/1d.json',
            'last_7_days_calories': '/1/user/-/activities/calories/date/today/7d.json',
            'todays_sedentary_minutes_realtime': '/1/user/-/activities/minutesSedentary/date/today/1d.json',
            'last_7_days_sedentary_minutes': '/1/user/-/activities/minutesSedentary/date/today/7d.json',
            'todays_lightly_active_minutes_realtime': '/1/user/-/activities/minutesLightlyActive/date/today/1d.json',
            'todays_fairly_active_minutes_realtime': '/1/user/-/activities/minutesFairlyActive/date/today/1d.json',
            'todays_very_active_minutes_realtime': '/1/user/-/activities/minutesVeryActive/date/today/1d.json',
            'last_7_days_lightly_active_minutes': '/1/user/-/activities/minutesLightlyActive/date/today/7d.json',
            'last_7_days_fairly_active_minutes': '/1/user/-/activities/minutesFairlyActive/date/today/7d.json',
            'last_7_days_very_active_minutes': '/1/user/-/activities/minutesVeryActive/date/today/7d.json',
            'todays_realtime_distance_covered': '/1/user/-/activities/distance/date/today/1d.json',
            'last_7_days_distance_covered': '/1/user/-/activities/distance/date/today/7d.json',
            'lifetime_activities_details': '/1/user/-/activities.json',
            'get_friends_leader_board': '/1/user/-/friends/leaderboard.json',
            'todays_sleep_details': '/1.2/user/-/sleep/date/today.json',
            'todays_heart_details': '/1/user/-/activities/heart/date/today/1d.json',
        }
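A consumer of this config would prepend these relative paths to the Fitbit API host and send an authenticated request. The sketch below is not part of the original file; the base URL, the `requests` call, and the bearer-token handling are assumptions about how the (unseen) wrapper might use the endpoint map.

import requests

FITBIT_API_BASE = "https://api.fitbit.com"  # assumed host for the relative paths above


def fetch(endpoint_key, access_token):
    # Look up a relative path by key and issue an authenticated GET (illustrative helper).
    endpoints = Fitbit_Webservice_Config().get_fitbit_endpoints()
    url = FITBIT_API_BASE + endpoints[endpoint_key]
    response = requests.get(url, headers={"Authorization": "Bearer " + access_token})
    response.raise_for_status()
    return response.json()

# e.g. fetch('todays_steps_realtime', token) would request
# https://api.fitbit.com/1/user/-/activities/steps/date/today/1d.json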
[ "module.momentum = None module.num_batches_tracked *= 0 for input in loader: if isinstance(input, (list,", "avg_loss eval_metrics[\"mAP\"] = mAP eval_metrics[\"classwise_f1\"] = classwise_f1 eval_metrics[\"sample_f1\"] = sample_f1 logger.info(\"EMA\") avg_loss, y_pred,", "criterion, device, logdir: Path, logger, n=10, main_metric=\"sample_f1\", epochs=75, input_key=\"image\", input_target_key=\"targets\"): train_metrics = {}", "if isinstance(module, torch.nn.modules.batchnorm._BatchNorm): module.running_mean = torch.zeros_like(module.running_mean) module.running_var = torch.ones_like(module.running_var) momenta[module] = module.momentum if", "def calc_metrics(y_true: np.ndarray, y_pred: np.ndarray, threshold=0.5): mAP = average_precision_score(y_true, y_pred, average=None) mAP =", "average=None) mAP = np.nan_to_num(mAP).mean() classwise_f1s = [] for i in range(len(y_true[0])): class_i_pred =", "filename) return metric else: return prev_metric def train(model, ema_model, dataloaders, optimizer, scheduler, criterion,", "20) best_metric = save_best_model( model, logdir, \"best.pth\", metric=eval_metrics[main_metric], prev_metric=best_metric) save_model(ema_model, logdir, \"ema.pth\") elapsed_sec", "= torch.ones_like(module.running_var) momenta[module] = module.momentum if not momenta: return was_training = model.training model.train()", "model_parameter) (output_dir / f\"fold{i}\").mkdir(exist_ok=True, parents=True) train(model=model, ema_model=ema_model, dataloaders=loaders, optimizer=optimizer, scheduler=scheduler, criterion=criterion, device=device, logdir=output_dir", "= outputs[\"clipwise_output\"].detach().cpu().numpy() target = y.detach().cpu().numpy() preds.append(clipwise_output) targs.append(target) y_pred = np.concatenate(preds, axis=0) y_true =", "y).detach() avg_loss += loss.item() / len(dataloader) clipwise_output = outputs[\"clipwise_output\"].detach().cpu().numpy() target = y.detach().cpu().numpy() preds.append(clipwise_output)", "src.configuration as C import src.models as models import src.utils as utils from copy", "tensors; in the latter case it is assumed that :meth:`model.forward()` should be called", "over data in `loader` to estimate the activation statistics for BatchNorm layers in", "logdir, filename) return metric else: return prev_metric def train(model, ema_model, dataloaders, optimizer, scheduler,", "numpy as np import torch import torch.nn as nn import src.configuration as C", "state_dict[\"model_state_dict\"] = model.state_dict() weights_path = logdir / filename with open(weights_path, \"wb\") as f:", "model=model, ema_model=ema_model, dataloader=dataloaders[\"train\"], optimizer=optimizer, scheduler=scheduler, criterion=criterion, device=device, n=n, input_key=input_key, input_target_key=input_target_key) mAP, classwise_f1, sample_f1", "criterion=criterion, device=device, input_key=input_key, input_target_key=input_target_key) mAP, classwise_f1, sample_f1 = calc_metrics(y_true, y_pred) eval_metrics[\"EMA_loss\"] = avg_loss", "update_parameters(self, model): for p_swa, p_model in zip(self.parameters(), model.parameters()): device = p_swa.device p_model_ =", "y_pred) eval_metrics[\"EMA_loss\"] = avg_loss eval_metrics[\"EMA_mAP\"] = mAP eval_metrics[\"EMA_classwise_f1\"] = classwise_f1 eval_metrics[\"EMA_sample_f1\"] = sample_f1", "logger.info(\"#\" * 20) logger.info(\"Train metrics\") for key, value in train_metrics.items(): logger.info(f\"{key}: {value:.5f}\") logger.info(\"Valid", "def train(model, ema_model, dataloaders, optimizer, scheduler, criterion, 
device, logdir: Path, logger, n=10, main_metric=\"sample_f1\",", "in config[\"split\"][\"name\"]: y = calltype_labels else: y = df[\"ebird_code\"] for i, (trn_idx, val_idx)", "config, phase, event_level_labels) for df_, phase in zip([trn_df, val_df], [\"train\", \"valid\"]) } model", "targs = [] cnt = n for step, batch in enumerate(progress_bar(dataloader)): cnt -=", "device=device)) if avg_fn is None: def avg_fn(averaged_model_parameter, model_parameter, num_averaged): return averaged_model_parameter + \\", "class AveragedModel(nn.Module): def __init__(self, model, device=None, avg_fn=None): super().__init__() self.module = deepcopy(model) if device", "model=ema_model, dataloader=val_dataloader, criterion=criterion, device=device, input_key=input_key, input_target_key=input_target_key) mAP, classwise_f1, sample_f1 = calc_metrics(y_true, y_pred) eval_metrics[\"EMA_loss\"]", "for bn_module in momenta.keys(): bn_module.momentum = momenta[bn_module] model.train(was_training) def train_one_epoch(model, ema_model, dataloader, optimizer,", "containing data. model (torch.nn.Module): model for which we seek to update BatchNorm statistics.", "save_model(model, logdir: Path, filename: str): state_dict = {} state_dict[\"model_state_dict\"] = model.state_dict() weights_path =", "class_i_targ = y_true[:, i] if class_i_targ.sum() == 0 and class_i_pred.sum() == 0: classwise_f1s.append(1.0)", "avg_fn(averaged_model_parameter, model_parameter, num_averaged): return averaged_model_parameter + \\ (model_parameter - averaged_model_parameter) / (num_averaged +", "if device is not None: self.module = self.module.to(device) self.register_buffer(\"n_averaged\", torch.tensor(0, dtype=torch.long, device=device)) if", "axis=0) return avg_loss, y_pred, y_true def calc_metrics(y_true: np.ndarray, y_pred: np.ndarray, threshold=0.5): mAP =", "(list, tuple)): input = input[0] if isinstance(input, dict): input = input[input_key] if device", "phase, event_level_labels) for df_, phase in zip([trn_df, val_df], [\"train\", \"valid\"]) } model =", "logger.info(f\"{key}: {value:.5f}\") logger.info(\"Valid metrics\") for key, value in eval_metrics.items(): logger.info(f\"{key}: {value:.5f}\") logger.info(\"#\" *", "averaged_model_parameter) / (num_averaged + 1) self.avg_fn = avg_fn def forward(self, *args, **kwargs): return", "df.loc[val_idx, :].reset_index(drop=True) loaders = { phase: C.get_loader(df_, datadir, config, phase, event_level_labels) for df_,", "num_averaged): return averaged_model_parameter + \\ (model_parameter - averaged_model_parameter) / (num_averaged + 1) self.avg_fn", "\"Multilabel\" in config[\"split\"][\"name\"]: y = calltype_labels else: y = df[\"ebird_code\"] for i, (trn_idx,", "/ len(dataloader) if cnt == 0: ema_model.update_parameters(model) cnt = n clipwise_output = outputs[\"clipwise_output\"].detach().cpu().numpy()", "with open(weights_path, \"wb\") as f: torch.save(state_dict, f) def save_best_model(model, logdir, filename, metric: float,", "logger.info(\"#\" * 20) best_metric = save_best_model( model, logdir, \"best.pth\", metric=eval_metrics[main_metric], prev_metric=best_metric) save_model(ema_model, logdir,", "f1_score class AveragedModel(nn.Module): def __init__(self, model, device=None, avg_fn=None): super().__init__() self.module = deepcopy(model) if", "each data batch in :attr:`loader` is either a tensor or a list or", "None if \"Multilabel\" in config[\"split\"][\"name\"]: y = calltype_labels else: y = df[\"ebird_code\"] for", "if device is not None: input = input.to(device) model(input) 
for bn_module in momenta.keys():", "* averaged_model_parameter + 0.9 * model_parameter) (output_dir / f\"fold{i}\").mkdir(exist_ok=True, parents=True) train(model=model, ema_model=ema_model, dataloaders=loaders,", "model) .. note:: The `update_bn` utility assumes that each data batch in :attr:`loader`", "\"\"\" momenta = {} for module in model.modules(): if isinstance(module, torch.nn.modules.batchnorm._BatchNorm): module.running_mean =", "= mAP eval_metrics[\"EMA_classwise_f1\"] = classwise_f1 eval_metrics[\"EMA_sample_f1\"] = sample_f1 logger.info(\"#\" * 20) logger.info(\"Train metrics\")", "= None module.num_batches_tracked *= 0 for input in loader: if isinstance(input, (list, tuple)):", "in eval_metrics.items(): logger.info(f\"{key}: {value:.5f}\") logger.info(\"#\" * 20) best_metric = save_best_model( model, logdir, \"best.pth\",", "return prev_metric def train(model, ema_model, dataloaders, optimizer, scheduler, criterion, device, logdir: Path, logger,", "y_pred, y_true = eval_one_epoch( model=ema_model, dataloader=val_dataloader, criterion=criterion, device=device, input_key=input_key, input_target_key=input_target_key) mAP, classwise_f1, sample_f1", "= utils.load_config(args.config) global_params = config[\"globals\"] output_dir = Path(global_params[\"output_dir\"]) output_dir.mkdir(exist_ok=True, parents=True) logger = utils.get_logger(output_dir", "device=device, input_key=input_key, input_target_key=input_target_key) mAP, classwise_f1, sample_f1 = calc_metrics(y_true, y_pred) eval_metrics[\"EMA_loss\"] = avg_loss eval_metrics[\"EMA_mAP\"]", "= models.get_model(config).to(device) criterion = C.get_criterion(config).to(device) optimizer = C.get_optimizer(model, config) scheduler = C.get_scheduler(optimizer, config)", "performs one pass over data in `loader` to estimate the activation statistics for", "> threshold class_i_targ = y_true[:, i] if class_i_targ.sum() == 0 and class_i_pred.sum() ==", "statistics on. Each data batch should be either a tensor, or a list/tuple", "[] cnt = n for step, batch in enumerate(progress_bar(dataloader)): cnt -= 1 x", "logger.info(\"EMA\") avg_loss, y_pred, y_true = eval_one_epoch( model=ema_model, dataloader=val_dataloader, criterion=criterion, device=device, input_key=input_key, input_target_key=input_target_key) mAP,", "tensor, or a list/tuple whose first element is a tensor containing data. 
model", "train(model, ema_model, dataloaders, optimizer, scheduler, criterion, device, logdir: Path, logger, n=10, main_metric=\"sample_f1\", epochs=75,", "logger.info(\"=\" * 20) logger.info(f\"Fold {i}\") logger.info(\"=\" * 20) trn_df = df.loc[trn_idx, :].reset_index(drop=True) val_df", "else: val_dataloader = dataloaders[\"valid\"] logger.info(\"Valid\") avg_loss, y_pred, y_true = eval_one_epoch( model=model, dataloader=val_dataloader, criterion=criterion,", "in the latter case it is assumed that :meth:`model.forward()` should be called on", "not momenta: return was_training = model.training model.train() for module in momenta.keys(): module.momentum =", "target = y.detach().cpu().numpy() preds.append(clipwise_output) targs.append(target) y_pred = np.concatenate(preds, axis=0) y_true = np.concatenate(targs, axis=0)", "= mAP train_metrics[\"classwise_f1\"] = classwise_f1 train_metrics[\"sample_f1\"] = sample_f1 if len(dataloaders) == 1: val_dataloader", "utils.get_logger(output_dir / \"output.log\") utils.set_seed(global_params[\"seed\"]) device = C.get_device(global_params[\"device\"]) df, datadir = C.get_metadata(config) splitter =", "== \"__main__\": warnings.filterwarnings(\"ignore\") args = utils.get_parser().parse_args() config = utils.load_config(args.config) global_params = config[\"globals\"] output_dir", "1) self.avg_fn = avg_fn def forward(self, *args, **kwargs): return self.module(*args, **kwargs) def update_parameters(self,", "torch.nn as nn import src.configuration as C import src.models as models import src.utils", "BatchNorm layers in the model. Arguments: loader (torch.utils.data.DataLoader): dataset loader to compute the", "compute the activation statistics on. Each data batch should be either a tensor,", "scheduler = C.get_scheduler(optimizer, config) ema_model = AveragedModel( model, avg_fn=lambda averaged_model_parameter, model_parameter, num_averaged: 0.1", "of tensors; in the latter case it is assumed that :meth:`model.forward()` should be", "Arguments: loader (torch.utils.data.DataLoader): dataset loader to compute the activation statistics on. Each data", "warnings.filterwarnings(\"ignore\") args = utils.get_parser().parse_args() config = utils.load_config(args.config) global_params = config[\"globals\"] output_dir = Path(global_params[\"output_dir\"])", "(output_dir / f\"fold{i}\").mkdir(exist_ok=True, parents=True) train(model=model, ema_model=ema_model, dataloaders=loaders, optimizer=optimizer, scheduler=scheduler, criterion=criterion, device=device, logdir=output_dir /", "classwise_f1s.append(1.0) else: classwise_f1s.append(f1_score(y_true=class_i_targ, y_pred=class_i_pred)) classwise_f1 = np.mean(classwise_f1s) y_pred_thresholded = (y_pred > threshold).astype(int) sample_f1", "which we seek to update BatchNorm statistics. 
def update_bn(loader, model, device=None, input_key=""):
    r"""Updates BatchNorm running_mean, running_var buffers in the model.

    It performs one pass over data in `loader` to estimate the activation
    statistics for BatchNorm layers in the model.

    Arguments:
        loader (torch.utils.data.DataLoader): dataset loader to compute the
            activation statistics on. Each data batch should be either a
            tensor, or a list/tuple whose first element is a tensor
            containing data.
        model (torch.nn.Module): model for which we seek to update BatchNorm
            statistics.
        device (torch.device, optional): If set, data will be transferred to
            :attr:`device` before being passed into :attr:`model`.

    Example:
        >>> loader, model = ...
        >>> torch.optim.swa_utils.update_bn(loader, model)

    .. note::
        The `update_bn` utility assumes that each data batch in :attr:`loader`
        is either a tensor or a list or tuple of tensors; in the latter case it
        is assumed that :meth:`model.forward()` should be called on the first
        element of the list or tuple corresponding to the data batch.
    """
    momenta = {}
    for module in model.modules():
        if isinstance(module, torch.nn.modules.batchnorm._BatchNorm):
            module.running_mean = torch.zeros_like(module.running_mean)
            module.running_var = torch.ones_like(module.running_var)
            momenta[module] = module.momentum

    if not momenta:
        return

    was_training = model.training
    model.train()
    for module in momenta.keys():
        module.momentum = None
        module.num_batches_tracked *= 0

    for input in loader:
        if isinstance(input, (list, tuple)):
            input = input[0]
        if isinstance(input, dict):
            input = input[input_key]
        if device is not None:
            input = input.to(device)

        model(input)

    for bn_module in momenta.keys():
        bn_module.momentum = momenta[bn_module]
    model.train(was_training)
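# Averaging weights invalidates the BatchNorm running statistics, which were
# accumulated for different parameter values, so train_one_epoch below calls
# update_bn once per epoch to re-estimate them for the EMA weights with a single
# forward pass over the training loader. Standalone usage mirrors that call
# (batches in this codebase are dicts keyed by `input_key`); `train_loader` is an
# illustrative name:
#
#     >>> update_bn(train_loader, ema_model, device=device, input_key="image")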
def train_one_epoch(model, ema_model, dataloader, optimizer, scheduler,
                    criterion, device, n=10,
                    input_key="image", input_target_key="targets"):
    avg_loss = 0.0
    model.train()
    preds = []
    targs = []
    cnt = n
    for step, batch in enumerate(progress_bar(dataloader)):
        cnt -= 1
        x = batch[input_key].to(device)
        y = batch[input_target_key].to(device).float()

        outputs = model(x)
        loss = criterion(outputs, y)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        avg_loss += loss.item() / len(dataloader)

        if cnt == 0:
            ema_model.update_parameters(model)
            cnt = n

        clipwise_output = outputs["clipwise_output"].detach().cpu().numpy()
        target = y.detach().cpu().numpy()
        preds.append(clipwise_output)
        targs.append(target)

    update_bn(dataloader, ema_model, device=device, input_key=input_key)
    scheduler.step()

    y_pred = np.concatenate(preds, axis=0)
    y_true = np.concatenate(targs, axis=0)
    return avg_loss, y_pred, y_true


def eval_one_epoch(model, dataloader, criterion, device,
                   input_key="image", input_target_key="targets"):
    avg_loss = 0.0
    model.eval()
    preds = []
    targs = []
    for step, batch in enumerate(progress_bar(dataloader)):
        with torch.no_grad():
            x = batch[input_key].to(device)
            y = batch[input_target_key].to(device).float()
            outputs = model(x)
            loss = criterion(outputs, y).detach()
        avg_loss += loss.item() / len(dataloader)

        clipwise_output = outputs["clipwise_output"].detach().cpu().numpy()
        target = y.detach().cpu().numpy()
        preds.append(clipwise_output)
        targs.append(target)

    y_pred = np.concatenate(preds, axis=0)
    y_true = np.concatenate(targs, axis=0)
    return avg_loss, y_pred, y_true
def calc_metrics(y_true: np.ndarray, y_pred: np.ndarray, threshold=0.5):
    mAP = average_precision_score(y_true, y_pred, average=None)
    mAP = np.nan_to_num(mAP).mean()

    classwise_f1s = []
    for i in range(len(y_true[0])):
        class_i_pred = y_pred[:, i] > threshold
        class_i_targ = y_true[:, i]
        if class_i_targ.sum() == 0 and class_i_pred.sum() == 0:
            classwise_f1s.append(1.0)
        else:
            classwise_f1s.append(f1_score(y_true=class_i_targ, y_pred=class_i_pred))

    classwise_f1 = np.mean(classwise_f1s)

    y_pred_thresholded = (y_pred > threshold).astype(int)
    sample_f1 = f1_score(y_true=y_true, y_pred=y_pred_thresholded, average="samples")
    return mAP, classwise_f1, sample_f1
>>> torch.optim.swa_utils.update_bn(loader,", "not None: input = input.to(device) model(input) for bn_module in momenta.keys(): bn_module.momentum = momenta[bn_module]", "filename with open(weights_path, \"wb\") as f: torch.save(state_dict, f) def save_best_model(model, logdir, filename, metric:", "device (torch.device, optional): If set, data will be transferred to :attr:`device` before being", "= batch[input_key].to(device) y = batch[input_target_key].to(device).float() outputs = model(x) loss = criterion(outputs, y).detach() avg_loss", "input_key=input_key, input_target_key=input_target_key) mAP, classwise_f1, sample_f1 = calc_metrics(y_true, y_pred) eval_metrics[\"loss\"] = avg_loss eval_metrics[\"mAP\"] =", "y.detach().cpu().numpy() preds.append(clipwise_output) targs.append(target) update_bn(dataloader, ema_model, device=device, input_key=input_key) scheduler.step() y_pred = np.concatenate(preds, axis=0) y_true", "prev_metric def train(model, ema_model, dataloaders, optimizer, scheduler, criterion, device, logdir: Path, logger, n=10,", "dataloaders=loaders, optimizer=optimizer, scheduler=scheduler, criterion=criterion, device=device, logdir=output_dir / f\"fold{i}\", logger=logger, n=10, main_metric=global_params[\"main_metric\"], epochs=global_params[\"num_epochs\"], input_key=global_params[\"input_key\"],", "config) ema_model = AveragedModel( model, avg_fn=lambda averaged_model_parameter, model_parameter, num_averaged: 0.1 * averaged_model_parameter +", "y_pred, y_true def eval_one_epoch(model, dataloader, criterion, device, input_key=\"image\", input_target_key=\"targets\"): avg_loss = 0.0 model.eval()", "logger.info(\"=\" * 20) logger.info(f\"Epoch [{epoch}/{epochs}]:\") logger.info(\"=\" * 20) logger.info(\"Train\") avg_loss, y_pred, y_true =", "prev_metric: float): if metric > prev_metric: save_model(model, logdir, filename) return metric else: return", "elapsed_sec % 60 logger.info(f\"Elapsed time: {elapsed_min}min {elapsed_sec:.4f}seconds.\") if __name__ == \"__main__\": warnings.filterwarnings(\"ignore\") args", "classwise_f1 train_metrics[\"sample_f1\"] = sample_f1 if len(dataloaders) == 1: val_dataloader = dataloaders[\"train\"] else: val_dataloader", "from pathlib import Path from fastprogress import progress_bar from sklearn.metrics import average_precision_score, f1_score", "+ \\ (model_parameter - averaged_model_parameter) / (num_averaged + 1) self.avg_fn = avg_fn def", "in enumerate(progress_bar(dataloader)): cnt -= 1 x = batch[input_key].to(device) y = batch[input_target_key].to(device).float() outputs =", "model.train() for module in momenta.keys(): module.momentum = None module.num_batches_tracked *= 0 for input", "loader, model = ... >>> torch.optim.swa_utils.update_bn(loader, model) .. note:: The `update_bn` utility assumes", "is None: def avg_fn(averaged_model_parameter, model_parameter, num_averaged): return averaged_model_parameter + \\ (model_parameter - averaged_model_parameter)", "+= loss.item() / len(dataloader) if cnt == 0: ema_model.update_parameters(model) cnt = n clipwise_output", "p_swa, p_model in zip(self.parameters(), model.parameters()): device = p_swa.device p_model_ = p_model.detach().to(device) if self.n_averaged", "np import torch import torch.nn as nn import src.configuration as C import src.models", "in the model. Arguments: loader (torch.utils.data.DataLoader): dataset loader to compute the activation statistics", "transferred to :attr:`device` before being passed into :attr:`model`. 
Example: >>> loader, model =", "= np.concatenate(targs, axis=0) return avg_loss, y_pred, y_true def calc_metrics(y_true: np.ndarray, y_pred: np.ndarray, threshold=0.5):", "parents=True) train(model=model, ema_model=ema_model, dataloaders=loaders, optimizer=optimizer, scheduler=scheduler, criterion=criterion, device=device, logdir=output_dir / f\"fold{i}\", logger=logger, n=10,", "in enumerate( splitter.split(df, y=y)): if i not in global_params[\"folds\"]: continue logger.info(\"=\" * 20)", "momenta.keys(): module.momentum = None module.num_batches_tracked *= 0 for input in loader: if isinstance(input,", "list/tuple whose first element is a tensor containing data. model (torch.nn.Module): model for", "device is not None: input = input.to(device) model(input) for bn_module in momenta.keys(): bn_module.momentum", "= 0.0 model.eval() preds = [] targs = [] for step, batch in", "optimizer.zero_grad() loss.backward() optimizer.step() avg_loss += loss.item() / len(dataloader) if cnt == 0: ema_model.update_parameters(model)", "model for which we seek to update BatchNorm statistics. device (torch.device, optional): If", "y_true def eval_one_epoch(model, dataloader, criterion, device, input_key=\"image\", input_target_key=\"targets\"): avg_loss = 0.0 model.eval() preds", "momenta[module] = module.momentum if not momenta: return was_training = model.training model.train() for module", "n for step, batch in enumerate(progress_bar(dataloader)): cnt -= 1 x = batch[input_key].to(device) y", "loss.item() / len(dataloader) clipwise_output = outputs[\"clipwise_output\"].detach().cpu().numpy() target = y.detach().cpu().numpy() preds.append(clipwise_output) targs.append(target) y_pred =", "> prev_metric: save_model(model, logdir, filename) return metric else: return prev_metric def train(model, ema_model,", "y = df[\"ebird_code\"] for i, (trn_idx, val_idx) in enumerate( splitter.split(df, y=y)): if i", "- averaged_model_parameter) / (num_averaged + 1) self.avg_fn = avg_fn def forward(self, *args, **kwargs):", "for df_, phase in zip([trn_df, val_df], [\"train\", \"valid\"]) } model = models.get_model(config).to(device) criterion", "model): for p_swa, p_model in zip(self.parameters(), model.parameters()): device = p_swa.device p_model_ = p_model.detach().to(device)", "if isinstance(input, dict): input = input[input_key] if device is not None: input =", "model = ... >>> torch.optim.swa_utils.update_bn(loader, model) .. 
def train(model, ema_model, dataloaders, optimizer, scheduler, criterion,
          device, logdir: Path, logger, n=10, main_metric="sample_f1",
          epochs=75, input_key="image", input_target_key="targets"):
    train_metrics = {}
    eval_metrics = {}
    best_metric = -np.inf
    for epoch in range(epochs):
        t0 = time.time()
        epoch += 1
        logger.info("=" * 20)
        logger.info(f"Epoch [{epoch}/{epochs}]:")
        logger.info("=" * 20)
        logger.info("Train")
        avg_loss, y_pred, y_true = train_one_epoch(
            model=model, ema_model=ema_model, dataloader=dataloaders["train"],
            optimizer=optimizer, scheduler=scheduler, criterion=criterion,
            device=device, n=n,
            input_key=input_key, input_target_key=input_target_key)
        mAP, classwise_f1, sample_f1 = calc_metrics(y_true, y_pred)
        train_metrics["loss"] = avg_loss
        train_metrics["mAP"] = mAP
        train_metrics["classwise_f1"] = classwise_f1
        train_metrics["sample_f1"] = sample_f1

        if len(dataloaders) == 1:
            val_dataloader = dataloaders["train"]
        else:
            val_dataloader = dataloaders["valid"]

        logger.info("Valid")
        avg_loss, y_pred, y_true = eval_one_epoch(
            model=model, dataloader=val_dataloader, criterion=criterion,
            device=device,
            input_key=input_key, input_target_key=input_target_key)
        mAP, classwise_f1, sample_f1 = calc_metrics(y_true, y_pred)
        eval_metrics["loss"] = avg_loss
        eval_metrics["mAP"] = mAP
        eval_metrics["classwise_f1"] = classwise_f1
        eval_metrics["sample_f1"] = sample_f1

        logger.info("EMA")
        avg_loss, y_pred, y_true = eval_one_epoch(
            model=ema_model, dataloader=val_dataloader, criterion=criterion,
            device=device,
            input_key=input_key, input_target_key=input_target_key)
        mAP, classwise_f1, sample_f1 = calc_metrics(y_true, y_pred)
        eval_metrics["EMA_loss"] = avg_loss
        eval_metrics["EMA_mAP"] = mAP
        eval_metrics["EMA_classwise_f1"] = classwise_f1
        eval_metrics["EMA_sample_f1"] = sample_f1

        logger.info("#" * 20)
        logger.info("Train metrics")
        for key, value in train_metrics.items():
            logger.info(f"{key}: {value:.5f}")
        logger.info("Valid metrics")
        for key, value in eval_metrics.items():
            logger.info(f"{key}: {value:.5f}")
        logger.info("#" * 20)

        best_metric = save_best_model(
            model, logdir, "best.pth",
            metric=eval_metrics[main_metric], prev_metric=best_metric)
        save_model(ema_model, logdir, "ema.pth")

        elapsed_sec = time.time() - t0
        elapsed_min = int(elapsed_sec // 60)
        elapsed_sec = elapsed_sec % 60
        logger.info(f"Elapsed time: {elapsed_min}min {elapsed_sec:.4f}seconds.")
device", "avg_loss, y_pred, y_true = eval_one_epoch( model=model, dataloader=val_dataloader, criterion=criterion, device=device, input_key=input_key, input_target_key=input_target_key) mAP, classwise_f1,", "None: input = input.to(device) model(input) for bn_module in momenta.keys(): bn_module.momentum = momenta[bn_module] model.train(was_training)", "eval_metrics[\"EMA_mAP\"] = mAP eval_metrics[\"EMA_classwise_f1\"] = classwise_f1 eval_metrics[\"EMA_sample_f1\"] = sample_f1 logger.info(\"#\" * 20) logger.info(\"Train", "not None: event_level_labels = C.get_event_level_labels(config) else: event_level_labels = None if \"Multilabel\" in config[\"split\"][\"name\"]:", "{} eval_metrics = {} best_metric = -np.inf for epoch in range(epochs): t0 =", "np.concatenate(preds, axis=0) y_true = np.concatenate(targs, axis=0) return avg_loss, y_pred, y_true def eval_one_epoch(model, dataloader,", "utils from copy import deepcopy from pathlib import Path from fastprogress import progress_bar", "train_metrics[\"mAP\"] = mAP train_metrics[\"classwise_f1\"] = classwise_f1 train_metrics[\"sample_f1\"] = sample_f1 if len(dataloaders) == 1:", "= C.get_criterion(config).to(device) optimizer = C.get_optimizer(model, config) scheduler = C.get_scheduler(optimizer, config) ema_model = AveragedModel(", "a tensor or a list or tuple of tensors; in the latter case", "y=y)): if i not in global_params[\"folds\"]: continue logger.info(\"=\" * 20) logger.info(f\"Fold {i}\") logger.info(\"=\"", "to update BatchNorm statistics. device (torch.device, optional): If set, data will be transferred", "= dataloaders[\"train\"] else: val_dataloader = dataloaders[\"valid\"] logger.info(\"Valid\") avg_loss, y_pred, y_true = eval_one_epoch( model=model,", "avg_loss, y_pred, y_true = eval_one_epoch( model=ema_model, dataloader=val_dataloader, criterion=criterion, device=device, input_key=input_key, input_target_key=input_target_key) mAP, classwise_f1,", "val_idx) in enumerate( splitter.split(df, y=y)): if i not in global_params[\"folds\"]: continue logger.info(\"=\" *", "utils.set_seed(global_params[\"seed\"]) device = C.get_device(global_params[\"device\"]) df, datadir = C.get_metadata(config) splitter = C.get_split(config) calltype_labels =", "in zip(self.parameters(), model.parameters()): device = p_swa.device p_model_ = p_model.detach().to(device) if self.n_averaged == 0:", "model.parameters()): device = p_swa.device p_model_ = p_model.detach().to(device) if self.n_averaged == 0: p_swa.detach().copy_(p_model_) else:", "axis=0) y_true = np.concatenate(targs, axis=0) return avg_loss, y_pred, y_true def eval_one_epoch(model, dataloader, criterion,", "mAP, classwise_f1, sample_f1 = calc_metrics(y_true, y_pred) train_metrics[\"loss\"] = avg_loss train_metrics[\"mAP\"] = mAP train_metrics[\"classwise_f1\"]", "save_model(ema_model, logdir, \"ema.pth\") elapsed_sec = time.time() - t0 elapsed_min = int(elapsed_sec // 60)", "buffers in the model. 
It performs one pass over data in `loader` to", "threshold class_i_targ = y_true[:, i] if class_i_targ.sum() == 0 and class_i_pred.sum() == 0:", "(torch.device, optional): If set, data will be transferred to :attr:`device` before being passed", "that :meth:`model.forward()` should be called on the first element of the list or", "input in loader: if isinstance(input, (list, tuple)): input = input[0] if isinstance(input, dict):", "input = input[0] if isinstance(input, dict): input = input[input_key] if device is not", "preds.append(clipwise_output) targs.append(target) y_pred = np.concatenate(preds, axis=0) y_true = np.concatenate(targs, axis=0) return avg_loss, y_pred,", "= classwise_f1 eval_metrics[\"EMA_sample_f1\"] = sample_f1 logger.info(\"#\" * 20) logger.info(\"Train metrics\") for key, value", "{i}\") logger.info(\"=\" * 20) trn_df = df.loc[trn_idx, :].reset_index(drop=True) val_df = df.loc[val_idx, :].reset_index(drop=True) loaders", "dataloaders, optimizer, scheduler, criterion, device, logdir: Path, logger, n=10, main_metric=\"sample_f1\", epochs=75, input_key=\"image\", input_target_key=\"targets\"):", "i, (trn_idx, val_idx) in enumerate( splitter.split(df, y=y)): if i not in global_params[\"folds\"]: continue", "latter case it is assumed that :meth:`model.forward()` should be called on the first", "device=device, input_key=input_key) scheduler.step() y_pred = np.concatenate(preds, axis=0) y_true = np.concatenate(targs, axis=0) return avg_loss,", "= avg_loss eval_metrics[\"EMA_mAP\"] = mAP eval_metrics[\"EMA_classwise_f1\"] = classwise_f1 eval_metrics[\"EMA_sample_f1\"] = sample_f1 logger.info(\"#\" *", "best_metric = -np.inf for epoch in range(epochs): t0 = time.time() epoch += 1", "zip(self.parameters(), model.parameters()): device = p_swa.device p_model_ = p_model.detach().to(device) if self.n_averaged == 0: p_swa.detach().copy_(p_model_)", "device=device, n=n, input_key=input_key, input_target_key=input_target_key) mAP, classwise_f1, sample_f1 = calc_metrics(y_true, y_pred) train_metrics[\"loss\"] = avg_loss", "(trn_idx, val_idx) in enumerate( splitter.split(df, y=y)): if i not in global_params[\"folds\"]: continue logger.info(\"=\"", "module.num_batches_tracked *= 0 for input in loader: if isinstance(input, (list, tuple)): input =", "optimizer=optimizer, scheduler=scheduler, criterion=criterion, device=device, logdir=output_dir / f\"fold{i}\", logger=logger, n=10, main_metric=global_params[\"main_metric\"], epochs=global_params[\"num_epochs\"], input_key=global_params[\"input_key\"], input_target_key=global_params[\"input_target_key\"])", "0: p_swa.detach().copy_(p_model_) else: p_swa.detach().copy_(self.avg_fn(p_swa.detach(), p_model_, self.n_averaged.to(device))) self.n_averaged += 1 def update_bn(loader, model, device=None,", "in momenta.keys(): bn_module.momentum = momenta[bn_module] model.train(was_training) def train_one_epoch(model, ema_model, dataloader, optimizer, scheduler, criterion,", "{elapsed_sec:.4f}seconds.\") if __name__ == \"__main__\": warnings.filterwarnings(\"ignore\") args = utils.get_parser().parse_args() config = utils.load_config(args.config) global_params", "copy import deepcopy from pathlib import Path from fastprogress import progress_bar from sklearn.metrics", "mAP, classwise_f1, sample_f1 def save_model(model, logdir: Path, filename: str): state_dict = {} state_dict[\"model_state_dict\"]", "== 0: p_swa.detach().copy_(p_model_) else: p_swa.detach().copy_(self.avg_fn(p_swa.detach(), p_model_, self.n_averaged.to(device))) self.n_averaged += 1 def 
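

# Illustrative usage only: this update_bn variant accepts dict-style batches
# (the training loop below passes input_key="image"). The toy dataset, model
# and helper name here are assumptions for the sketch, not part of the script.
def _demo_update_bn_on_dict_batches():
    from torch.utils.data import DataLoader, Dataset

    class _ToyDictDataset(Dataset):
        def __len__(self):
            return 8

        def __getitem__(self, idx):
            return {"image": torch.randn(3, 32, 32), "targets": torch.zeros(4)}

    toy_model = nn.Sequential(nn.Conv2d(3, 8, 3), nn.BatchNorm2d(8), nn.ReLU())
    toy_loader = DataLoader(_ToyDictDataset(), batch_size=4)
    # One pass over the loader re-estimates the BatchNorm running statistics.
    update_bn(toy_loader, toy_model, input_key="image")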


def train_one_epoch(model, ema_model, dataloader, optimizer, scheduler, criterion, device, n=10, input_key="image", input_target_key="targets"):
    avg_loss = 0.0
    model.train()
    preds = []
    targs = []
    cnt = n
    for step, batch in enumerate(progress_bar(dataloader)):
        cnt -= 1
        x = batch[input_key].to(device)
        y = batch[input_target_key].to(device).float()

        outputs = model(x)
        loss = criterion(outputs, y)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        avg_loss += loss.item() / len(dataloader)

        if cnt == 0:
            ema_model.update_parameters(model)
            cnt = n

        clipwise_output = outputs["clipwise_output"].detach().cpu().numpy()
        target = y.detach().cpu().numpy()
        preds.append(clipwise_output)
        targs.append(target)

    update_bn(dataloader, ema_model, device=device, input_key=input_key)
    scheduler.step()

    y_pred = np.concatenate(preds, axis=0)
    y_true = np.concatenate(targs, axis=0)
    return avg_loss, y_pred, y_true


def eval_one_epoch(model, dataloader, criterion, device, input_key="image", input_target_key="targets"):
    avg_loss = 0.0
    model.eval()
    preds = []
    targs = []
    for step, batch in enumerate(progress_bar(dataloader)):
        with torch.no_grad():
            x = batch[input_key].to(device)
            y = batch[input_target_key].to(device).float()
            outputs = model(x)
            loss = criterion(outputs, y).detach()
        avg_loss += loss.item() / len(dataloader)

        clipwise_output = outputs["clipwise_output"].detach().cpu().numpy()
        target = y.detach().cpu().numpy()
        preds.append(clipwise_output)
        targs.append(target)

    y_pred = np.concatenate(preds, axis=0)
    y_true = np.concatenate(targs, axis=0)
    return avg_loss, y_pred, y_true


def calc_metrics(y_true: np.ndarray, y_pred: np.ndarray, threshold=0.5):
    mAP = average_precision_score(y_true, y_pred, average=None)
    mAP = np.nan_to_num(mAP).mean()

    classwise_f1s = []
    for i in range(len(y_true[0])):
        class_i_pred = y_pred[:, i] > threshold
        class_i_targ = y_true[:, i]
        if class_i_targ.sum() == 0 and class_i_pred.sum() == 0:
            classwise_f1s.append(1.0)
        else:
            classwise_f1s.append(f1_score(y_true=class_i_targ, y_pred=class_i_pred))
    classwise_f1 = np.mean(classwise_f1s)

    y_pred_thresholded = (y_pred > threshold).astype(int)
    sample_f1 = f1_score(y_true=y_true, y_pred=y_pred_thresholded, average="samples")
    return mAP, classwise_f1, sample_f1
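

# Tiny illustration on made-up arrays of what calc_metrics returns: the mean
# per-class average precision (NaN classes counted as 0), class-wise F1 at the
# 0.5 threshold (all-negative classes count as 1.0), and sample-averaged F1.
def _demo_calc_metrics():
    y_true = np.array([[1, 0, 0],
                       [0, 1, 0],
                       [0, 0, 1]])
    y_pred = np.array([[0.9, 0.2, 0.1],
                       [0.3, 0.8, 0.0],
                       [0.6, 0.1, 0.7]])
    mAP, classwise_f1, sample_f1 = calc_metrics(y_true, y_pred)
    print(f"mAP={mAP:.3f} classwise_f1={classwise_f1:.3f} sample_f1={sample_f1:.3f}")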


def save_model(model, logdir: Path, filename: str):
    state_dict = {}
    state_dict["model_state_dict"] = model.state_dict()
    weights_path = logdir / filename
    with open(weights_path, "wb") as f:
        torch.save(state_dict, f)


def save_best_model(model, logdir, filename, metric: float, prev_metric: float):
    if metric > prev_metric:
        save_model(model, logdir, filename)
        return metric
    else:
        return prev_metric


def train(model, ema_model, dataloaders, optimizer, scheduler, criterion, device, logdir: Path, logger, n=10, main_metric="sample_f1", epochs=75, input_key="image", input_target_key="targets"):
    train_metrics = {}
    eval_metrics = {}
    best_metric = -np.inf
    for epoch in range(epochs):
        t0 = time.time()
        epoch += 1
        logger.info("=" * 20)
        logger.info(f"Epoch [{epoch}/{epochs}]:")
        logger.info("=" * 20)

        logger.info("Train")
        avg_loss, y_pred, y_true = train_one_epoch(
            model=model,
            ema_model=ema_model,
            dataloader=dataloaders["train"],
            optimizer=optimizer,
            scheduler=scheduler,
            criterion=criterion,
            device=device,
            n=n,
            input_key=input_key,
            input_target_key=input_target_key)
        mAP, classwise_f1, sample_f1 = calc_metrics(y_true, y_pred)
        train_metrics["loss"] = avg_loss
        train_metrics["mAP"] = mAP
        train_metrics["classwise_f1"] = classwise_f1
        train_metrics["sample_f1"] = sample_f1

        if len(dataloaders) == 1:
            val_dataloader = dataloaders["train"]
        else:
            val_dataloader = dataloaders["valid"]

        logger.info("Valid")
        avg_loss, y_pred, y_true = eval_one_epoch(
            model=model,
            dataloader=val_dataloader,
            criterion=criterion,
            device=device,
            input_key=input_key,
            input_target_key=input_target_key)
        mAP, classwise_f1, sample_f1 = calc_metrics(y_true, y_pred)
        eval_metrics["loss"] = avg_loss
        eval_metrics["mAP"] = mAP
        eval_metrics["classwise_f1"] = classwise_f1
        eval_metrics["sample_f1"] = sample_f1

        logger.info("EMA")
        avg_loss, y_pred, y_true = eval_one_epoch(
            model=ema_model,
            dataloader=val_dataloader,
            criterion=criterion,
            device=device,
            input_key=input_key,
            input_target_key=input_target_key)
        mAP, classwise_f1, sample_f1 = calc_metrics(y_true, y_pred)
        eval_metrics["EMA_loss"] = avg_loss
        eval_metrics["EMA_mAP"] = mAP
        eval_metrics["EMA_classwise_f1"] = classwise_f1
        eval_metrics["EMA_sample_f1"] = sample_f1

        logger.info("#" * 20)
        logger.info("Train metrics")
        for key, value in train_metrics.items():
            logger.info(f"{key}: {value:.5f}")
        logger.info("Valid metrics")
        for key, value in eval_metrics.items():
            logger.info(f"{key}: {value:.5f}")
        logger.info("#" * 20)

        best_metric = save_best_model(
            model, logdir, "best.pth",
            metric=eval_metrics[main_metric], prev_metric=best_metric)
        save_model(ema_model, logdir, "ema.pth")

        elapsed_sec = time.time() - t0
        elapsed_min = int(elapsed_sec // 60)
        elapsed_sec = elapsed_sec % 60
        logger.info(f"Elapsed time: {elapsed_min}min {elapsed_sec:.4f}seconds.")
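

# Sketch of reading back a checkpoint written by save_model / save_best_model.
# The file stores only {"model_state_dict": ...}, so restoring weights for
# inference looks like this (the path and model instance are assumptions).
def _demo_load_checkpoint(weights_path, model):
    checkpoint = torch.load(weights_path, map_location="cpu")
    model.load_state_dict(checkpoint["model_state_dict"])
    model.eval()
    return model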


if __name__ == "__main__":
    warnings.filterwarnings("ignore")

    args = utils.get_parser().parse_args()
    config = utils.load_config(args.config)
    global_params = config["globals"]

    output_dir = Path(global_params["output_dir"])
    output_dir.mkdir(exist_ok=True, parents=True)
    logger = utils.get_logger(output_dir / "output.log")
    utils.set_seed(global_params["seed"])
    device = C.get_device(global_params["device"])

    df, datadir = C.get_metadata(config)
    splitter = C.get_split(config)
    calltype_labels = C.get_calltype_labels(df)

    if config["data"].get("event_level_labels") is not None:
        event_level_labels = C.get_event_level_labels(config)
    else:
        event_level_labels = None

    if "Multilabel" in config["split"]["name"]:
        y = calltype_labels
    else:
        y = df["ebird_code"]
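
    # For reference, a minimal config covering only the keys read in this
    # block. utils.load_config is assumed to parse a YAML file; the values
    # shown are illustrative, and the model/loss/optimizer/scheduler/loader
    # sections that src.configuration consumes are elided.
    #
    # globals:
    #   output_dir: output/exp001
    #   seed: 1213
    #   device: cuda
    #   folds: [0]
    #   num_epochs: 75
    #   main_metric: sample_f1
    #   input_key: image
    #   input_target_key: targets
    # split:
    #   name: MultilabelStratifiedKFold
    # data:
    #   event_level_labels: null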

    for i, (trn_idx, val_idx) in enumerate(splitter.split(df, y=y)):
        if i not in global_params["folds"]:
            continue
        logger.info("=" * 20)
        logger.info(f"Fold {i}")
        logger.info("=" * 20)

        trn_df = df.loc[trn_idx, :].reset_index(drop=True)
        val_df = df.loc[val_idx, :].reset_index(drop=True)

        loaders = {
            phase: C.get_loader(df_, datadir, config, phase, event_level_labels)
            for df_, phase in zip([trn_df, val_df], ["train", "valid"])
        }
        model = models.get_model(config).to(device)
        criterion = C.get_criterion(config).to(device)
        optimizer = C.get_optimizer(model, config)
        scheduler = C.get_scheduler(optimizer, config)
        ema_model = AveragedModel(
            model,
            avg_fn=lambda averaged_model_parameter, model_parameter, num_averaged:
                0.1 * averaged_model_parameter + 0.9 * model_parameter)

        (output_dir / f"fold{i}").mkdir(exist_ok=True, parents=True)
Example: >>> loader, model = ...", "* 20) logger.info(\"Train metrics\") for key, value in train_metrics.items(): logger.info(f\"{key}: {value:.5f}\") logger.info(\"Valid metrics\")", "a tensor, or a list/tuple whose first element is a tensor containing data.", "criterion(outputs, y).detach() avg_loss += loss.item() / len(dataloader) clipwise_output = outputs[\"clipwise_output\"].detach().cpu().numpy() target = y.detach().cpu().numpy()", "i] if class_i_targ.sum() == 0 and class_i_pred.sum() == 0: classwise_f1s.append(1.0) else: classwise_f1s.append(f1_score(y_true=class_i_targ, y_pred=class_i_pred))", "sample_f1 def save_model(model, logdir: Path, filename: str): state_dict = {} state_dict[\"model_state_dict\"] = model.state_dict()", "as C import src.models as models import src.utils as utils from copy import", "- t0 elapsed_min = int(elapsed_sec // 60) elapsed_sec = elapsed_sec % 60 logger.info(f\"Elapsed", "[] for step, batch in enumerate(progress_bar(dataloader)): with torch.no_grad(): x = batch[input_key].to(device) y =", "for step, batch in enumerate(progress_bar(dataloader)): with torch.no_grad(): x = batch[input_key].to(device) y = batch[input_target_key].to(device).float()", "+ 1) self.avg_fn = avg_fn def forward(self, *args, **kwargs): return self.module(*args, **kwargs) def", "y) optimizer.zero_grad() loss.backward() optimizer.step() avg_loss += loss.item() / len(dataloader) if cnt == 0:", "that each data batch in :attr:`loader` is either a tensor or a list", "if not momenta: return was_training = model.training model.train() for module in momenta.keys(): module.momentum", "if metric > prev_metric: save_model(model, logdir, filename) return metric else: return prev_metric def", "logger.info(\"Valid\") avg_loss, y_pred, y_true = eval_one_epoch( model=model, dataloader=val_dataloader, criterion=criterion, device=device, input_key=input_key, input_target_key=input_target_key) mAP,", "filename: str): state_dict = {} state_dict[\"model_state_dict\"] = model.state_dict() weights_path = logdir / filename", "= y_true[:, i] if class_i_targ.sum() == 0 and class_i_pred.sum() == 0: classwise_f1s.append(1.0) else:", "classwise_f1 eval_metrics[\"sample_f1\"] = sample_f1 logger.info(\"EMA\") avg_loss, y_pred, y_true = eval_one_epoch( model=ema_model, dataloader=val_dataloader, criterion=criterion,", "y = calltype_labels else: y = df[\"ebird_code\"] for i, (trn_idx, val_idx) in enumerate(", "avg_loss = 0.0 model.eval() preds = [] targs = [] for step, batch", "is not None: input = input.to(device) model(input) for bn_module in momenta.keys(): bn_module.momentum =", "save_model(model, logdir, filename) return metric else: return prev_metric def train(model, ema_model, dataloaders, optimizer,", "criterion=criterion, device=device, n=n, input_key=input_key, input_target_key=input_target_key) mAP, classwise_f1, sample_f1 = calc_metrics(y_true, y_pred) train_metrics[\"loss\"] =", "= np.concatenate(preds, axis=0) y_true = np.concatenate(targs, axis=0) return avg_loss, y_pred, y_true def eval_one_epoch(model,", "splitter.split(df, y=y)): if i not in global_params[\"folds\"]: continue logger.info(\"=\" * 20) logger.info(f\"Fold {i}\")", "in :attr:`loader` is either a tensor or a list or tuple of tensors;", "ema_model.update_parameters(model) cnt = n clipwise_output = outputs[\"clipwise_output\"].detach().cpu().numpy() target = y.detach().cpu().numpy() preds.append(clipwise_output) targs.append(target) update_bn(dataloader,", "/ \"output.log\") utils.set_seed(global_params[\"seed\"]) device = 
C.get_device(global_params[\"device\"]) df, datadir = C.get_metadata(config) splitter = C.get_split(config)", "loss = criterion(outputs, y).detach() avg_loss += loss.item() / len(dataloader) clipwise_output = outputs[\"clipwise_output\"].detach().cpu().numpy() target", "C.get_event_level_labels(config) else: event_level_labels = None if \"Multilabel\" in config[\"split\"][\"name\"]: y = calltype_labels else:", "if config[\"data\"].get(\"event_level_labels\") is not None: event_level_labels = C.get_event_level_labels(config) else: event_level_labels = None if", "df_, phase in zip([trn_df, val_df], [\"train\", \"valid\"]) } model = models.get_model(config).to(device) criterion =", "import numpy as np import torch import torch.nn as nn import src.configuration as", "def __init__(self, model, device=None, avg_fn=None): super().__init__() self.module = deepcopy(model) if device is not", "train_one_epoch( model=model, ema_model=ema_model, dataloader=dataloaders[\"train\"], optimizer=optimizer, scheduler=scheduler, criterion=criterion, device=device, n=n, input_key=input_key, input_target_key=input_target_key) mAP, classwise_f1,", "loader to compute the activation statistics on. Each data batch should be either", ":attr:`device` before being passed into :attr:`model`. Example: >>> loader, model = ... >>>", "logger.info(f\"Fold {i}\") logger.info(\"=\" * 20) trn_df = df.loc[trn_idx, :].reset_index(drop=True) val_df = df.loc[val_idx, :].reset_index(drop=True)", "torch.no_grad(): x = batch[input_key].to(device) y = batch[input_target_key].to(device).float() outputs = model(x) loss = criterion(outputs,", "If set, data will be transferred to :attr:`device` before being passed into :attr:`model`.", "eval_metrics[\"classwise_f1\"] = classwise_f1 eval_metrics[\"sample_f1\"] = sample_f1 logger.info(\"EMA\") avg_loss, y_pred, y_true = eval_one_epoch( model=ema_model,", "criterion = C.get_criterion(config).to(device) optimizer = C.get_optimizer(model, config) scheduler = C.get_scheduler(optimizer, config) ema_model =", "avg_loss = 0.0 model.train() preds = [] targs = [] cnt = n", "model = models.get_model(config).to(device) criterion = C.get_criterion(config).to(device) optimizer = C.get_optimizer(model, config) scheduler = C.get_scheduler(optimizer,", "= logdir / filename with open(weights_path, \"wb\") as f: torch.save(state_dict, f) def save_best_model(model,", "[{epoch}/{epochs}]:\") logger.info(\"=\" * 20) logger.info(\"Train\") avg_loss, y_pred, y_true = train_one_epoch( model=model, ema_model=ema_model, dataloader=dataloaders[\"train\"],", "warnings import numpy as np import torch import torch.nn as nn import src.configuration", "list or tuple corresponding to the data batch. 
\"\"\" momenta = {} for", "module.running_var = torch.ones_like(module.running_var) momenta[module] = module.momentum if not momenta: return was_training = model.training", "== 0 and class_i_pred.sum() == 0: classwise_f1s.append(1.0) else: classwise_f1s.append(f1_score(y_true=class_i_targ, y_pred=class_i_pred)) classwise_f1 = np.mean(classwise_f1s)", "+= 1 def update_bn(loader, model, device=None, input_key=\"\"): r\"\"\"Updates BatchNorm running_mean, running_var buffers in", "n=n, input_key=input_key, input_target_key=input_target_key) mAP, classwise_f1, sample_f1 = calc_metrics(y_true, y_pred) train_metrics[\"loss\"] = avg_loss train_metrics[\"mAP\"]", "0.1 * averaged_model_parameter + 0.9 * model_parameter) (output_dir / f\"fold{i}\").mkdir(exist_ok=True, parents=True) train(model=model, ema_model=ema_model,", "= average_precision_score(y_true, y_pred, average=None) mAP = np.nan_to_num(mAP).mean() classwise_f1s = [] for i in", "+= 1 logger.info(\"=\" * 20) logger.info(f\"Epoch [{epoch}/{epochs}]:\") logger.info(\"=\" * 20) logger.info(\"Train\") avg_loss, y_pred,", "calc_metrics(y_true, y_pred) eval_metrics[\"loss\"] = avg_loss eval_metrics[\"mAP\"] = mAP eval_metrics[\"classwise_f1\"] = classwise_f1 eval_metrics[\"sample_f1\"] =", "estimate the activation statistics for BatchNorm layers in the model. Arguments: loader (torch.utils.data.DataLoader):", "input_key=\"\"): r\"\"\"Updates BatchNorm running_mean, running_var buffers in the model. It performs one pass" ]
# pertpy API namespace aliases
import scanpy

pertpy_settings = scanpy.settings

from pertpy.api import data as dt
from pertpy.api import plot as pl
from pertpy.api import preprocessing as pp
from pertpy.api import tools as tl
"""
gyrodata.py

Run one motor with a sinusoidal speed input and an attached gyro.
This example shows how to use the gyro to measure angular position and
velocity by attaching it to the motor shaft.

Setup:
    Connect one large motor to port 'A'.
    Connect the gyro sensor to port number 1.

Notes:
    1. Remember there's a cable attached to the sensor, so limit the rotation
       angle to approx. 180 degrees.
    2. The maximum angular speed that the gyro can detect without saturating
       is 440 deg./s (approx. 7.7 rad/s). Limit the motor speed % output to
       no more than 35 %.
"""
# Importing modules and classes
import time
import numpy as np
from scipy import integrate
from pyev3.utils import plot_line
from pyev3.brick import LegoEV3
from pyev3.devices import Gyro, Motor

# Defining parameters (for one motor)
T = 2       # Period of sine wave (s)
u0 = 30     # Motor speed amplitude (%)
tstop = 2   # Sine wave duration (s)

# Pre-allocating output arrays
tmotor = []
theta = []
tgyro = []
angle = []
rate = []

# Creating LEGO EV3 objects
ev3 = LegoEV3()
motor = Motor(ev3, port='A')
gyro = Gyro(ev3, portnum=1, inputmode='angle&rate')

# Initializing motor
motor.outputmode = 'speed'
motor.output = 0
motor.reset_angle()
motor.start()

# Getting initial gyro sensor reading to remove drift in the data
angle0, rate0 = gyro.output

# Initializing current time stamp and starting clock
tcurr = 0
tstart = time.perf_counter()

# Running motor sine wave output
while tcurr <= tstop:
    # Getting current time for motor (s)
    tcurr = time.perf_counter() - tstart
    # Assigning current motor sinusoidal output using the current time stamp
    motor.output = u0 * np.sin((2*np.pi/T) * tcurr)
    # Updating output arrays for motor
    tmotor.append(tcurr)
    theta.append(motor.angle)
    # Getting current time for gyro (s)
    tcurr = time.perf_counter() - tstart
    # Updating output arrays for gyro (and converting from deg/s to rad/s)
    anglecurr, ratecurr = gyro.output
    tgyro.append(tcurr)
    angle.append(anglecurr - angle0)
    rate.append(np.pi/180 * (ratecurr - rate0))

# Stopping motor and closing brick connection
motor.stop(brake='off')
ev3.close()

# Calculating motor angular velocity (rad/s)
w = np.pi/180 * np.gradient(theta, tmotor)

# Plotting results
plot_line([tmotor, tgyro], [theta, angle], yname='Angular Position (deg.)',
          legend=['Tacho', 'Gyro'], marker=True)
plot_line([tmotor, tgyro], [w, rate], yname='Angular velocity (rad/s)',
          legend=['Tacho', 'Gyro'], marker=True)
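The script imports scipy.integrate but only plots the raw signals. A natural follow-up, sketched below and not part of the original example, is to integrate the measured gyro rate back into an angle and compare it with the gyro's angle channel; the array names are the ones filled in the acquisition loop above.

# Hedged cross-check: integrate the rate (rad/s) back to an angle (deg.).
import numpy as np
from scipy import integrate

angle_from_rate = np.degrees(
    integrate.cumulative_trapezoid(rate, tgyro, initial=0))
max_dev = np.max(np.abs(angle_from_rate - np.asarray(angle)))
print(f"Max deviation between integrated rate and measured angle: {max_dev:.2f} deg.")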
# DavideAlidosi/May9
import __main__
import May9_Next


def initializePlugin(*args):
    __main__.May9_Next = May9_Next


def uninitializePlugin(*args):
    try:
        __main__.__delattr__("May9_Next")
    except AttributeError:
        pass
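For context, `initializePlugin` and `uninitializePlugin` are the entry points Maya calls when a scripted plug-in is loaded or unloaded. The sketch below is illustrative only; the plug-in file name is a guess and not taken from the repository.

# Hedged usage sketch, run inside a Maya session.
import maya.cmds as cmds

cmds.loadPlugin("May9_Next_plugin.py")    # Maya calls initializePlugin()
# ... later, unloading the plug-in triggers uninitializePlugin():
cmds.unloadPlugin("May9_Next_plugin.py")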
# dduraipandian/scrapqd: scrapqd/gql/enum/browser.py
from graphql import GraphQLEnumType as EnumType

from scrapqd.executor.selenium_driver.factory import BrowserFactory

BrowserEnum = EnumType("Browser",
                       {d.upper(): d for d in BrowserFactory().mapping()},
                       description="Browser option in the selenium executor")
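BrowserEnum is a plain graphql-core enum type built from whatever drivers BrowserFactory().mapping() exposes. The sketch below shows how such an enum is typically attached to a field argument; the query type, resolver, and the CHROME value are illustrative assumptions, not part of scrapqd.

# Illustrative wiring of BrowserEnum into a tiny graphql-core schema (not scrapqd's schema).
from graphql import (GraphQLArgument, GraphQLField, GraphQLObjectType,
                     GraphQLSchema, GraphQLString, graphql_sync)

QueryType = GraphQLObjectType(
    name="Query",
    fields={
        "executor": GraphQLField(
            GraphQLString,
            args={"browser": GraphQLArgument(BrowserEnum)},
            resolve=lambda _obj, _info, browser=None: f"selenium driver: {browser}",
        )
    },
)
schema = GraphQLSchema(query=QueryType)

# Enum value names come from BrowserFactory().mapping(); if it maps "chrome",
# the following query would be valid.
result = graphql_sync(schema, "{ executor(browser: CHROME) }")
print(result.data, result.errors)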
"""
 OpenVINO DL Workbench
 Class for ORM model describing dataset augmentation job

 Copyright (c) 2021 Intel Corporation

 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at
      http://www.apache.org/licenses/LICENSE-2.0
 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
"""
import json

try:
    from typing import TypedDict
except ImportError:
    from typing_extensions import TypedDict

from sqlalchemy import Column, Integer, ForeignKey, Boolean, Float, Text
from sqlalchemy.orm import relationship, backref

from wb.main.enumerates import JobTypesEnum
from wb.main.models.datasets_model import DatasetsModel, DatasetJobData
from wb.main.models.jobs_model import JobsModel


class DatasetAugmentationJobData(TypedDict):
    applyHorizontalFlip: bool
    applyVerticalFlip: bool
    applyErase: bool
    eraseRatio: int
    eraseImages: int
    applyNoise: bool
    noiseRatio: int
    noiseImages: int
    applyImageCorrections: bool
    imageCorrections: str


# pylint: disable=too-many-instance-attributes
class DatasetAugmentationJobModel(JobsModel):
    __tablename__ = 'dataset_augmentation_jobs'

    __mapper_args__ = {
        'polymorphic_identity': JobTypesEnum.augment_dataset_type.value
    }

    job_id = Column(Integer, ForeignKey(JobsModel.job_id), primary_key=True)
    dataset_id = Column(Integer, ForeignKey(DatasetsModel.id), nullable=False)

    horizontal_flip = Column(Boolean, nullable=False, default=False)
    vertical_flip = Column(Boolean, nullable=False, default=False)
    apply_random_erase = Column(Boolean, nullable=False, default=False)
    erase_ratio = Column(Float, nullable=True)
    erase_images = Column(Integer, nullable=True)
    apply_noise_injection = Column(Boolean, nullable=False, default=False)
    noise_ratio = Column(Float, nullable=True)
    noise_images = Column(Integer, nullable=True)
    apply_image_corrections = Column(Boolean, nullable=False, default=False)
    image_corrections = Column(Text, nullable=True)

    dataset = relationship(DatasetsModel, foreign_keys=[dataset_id],
                           backref=backref('dataset_augmentation_job', lazy='subquery',
                                           cascade='delete,all', uselist=False))

    def __init__(self, data: DatasetJobData, augmentation_data: DatasetAugmentationJobData):
        super().__init__(data)
        self.dataset_id = data['datasetId']
        self.vertical_flip = augmentation_data['applyVerticalFlip']
        self.horizontal_flip = augmentation_data['applyHorizontalFlip']
        self.apply_noise_injection = augmentation_data['applyNoise']
        self.apply_random_erase = augmentation_data['applyErase']
        self.erase_images = augmentation_data['eraseImages']
        self.erase_ratio = augmentation_data['eraseRatio']
        self.noise_ratio = augmentation_data['noiseRatio']
        self.noise_images = augmentation_data['noiseImages']
        self.apply_image_corrections = augmentation_data['applyImageCorrections']
        self.image_corrections = json.dumps(augmentation_data['imageCorrections'])

    def json(self) -> dict:
        return {
            **super().json(),
            **self.dataset.json()
        }

    @property
    def augmented_images_count(self) -> int:
        augmented_images_count = 0
        if self.apply_random_erase:
            augmented_images_count += self.erase_images
        if self.apply_noise_injection:
            augmented_images_count += self.noise_images
        if self.horizontal_flip:
            augmented_images_count += 1
        if self.vertical_flip:
            augmented_images_count += 1
        if self.apply_image_corrections:
            augmented_images_count += len(self.image_corrections)
        return augmented_images_count
See the License for the specific language governing permissions and limitations under", "import JobsModel class DatasetAugmentationJobData(TypedDict): applyHorizontalFlip: bool applyVerticalFlip: bool applyErase: bool eraseRatio: int eraseImages:", "augmentation_data['applyNoise'] self.apply_random_erase = augmentation_data['applyErase'] self.erase_images = augmentation_data['eraseImages'] self.erase_ratio = augmentation_data['eraseRatio'] self.noise_ratio = augmentation_data['noiseRatio']", "def json(self) -> dict: return { **super().json(), **self.dataset.json() } @property def augmented_images_count(self) ->", "ForeignKey(DatasetsModel.id), nullable=False) horizontal_flip = Column(Boolean, nullable=False, default=False) vertical_flip = Column(Boolean, nullable=False, default=False) apply_random_erase", "use this file except in compliance with the License. You may obtain a", "of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to", "Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use", "DatasetJobData from wb.main.models.jobs_model import JobsModel class DatasetAugmentationJobData(TypedDict): applyHorizontalFlip: bool applyVerticalFlip: bool applyErase: bool", "= Column(Float, nullable=True) noise_images = Column(Integer, nullable=True) apply_image_corrections = Column(Boolean, nullable=False, default=False) image_corrections", "default=False) image_corrections = Column(Text, nullable=True) dataset = relationship(DatasetsModel, foreign_keys=[dataset_id], backref=backref('dataset_augmentation_job', lazy='subquery', cascade='delete,all', uselist=False))", "a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or", "Column(Integer, ForeignKey(JobsModel.job_id), primary_key=True) dataset_id = Column(Integer, ForeignKey(DatasetsModel.id), nullable=False) horizontal_flip = Column(Boolean, nullable=False, default=False)", "required by applicable law or agreed to in writing, software distributed under the", "Boolean, Float, Text from sqlalchemy.orm import relationship, backref from wb.main.enumerates import JobTypesEnum from", "wb.main.models.datasets_model import DatasetsModel, DatasetJobData from wb.main.models.jobs_model import JobsModel class DatasetAugmentationJobData(TypedDict): applyHorizontalFlip: bool applyVerticalFlip:", "ForeignKey(JobsModel.job_id), primary_key=True) dataset_id = Column(Integer, ForeignKey(DatasetsModel.id), nullable=False) horizontal_flip = Column(Boolean, nullable=False, default=False) vertical_flip", "if self.apply_noise_injection: augmented_images_count += self.noise_images if self.horizontal_flip: augmented_images_count += 1 if self.vertical_flip: augmented_images_count", "Column(Boolean, nullable=False, default=False) image_corrections = Column(Text, nullable=True) dataset = relationship(DatasetsModel, foreign_keys=[dataset_id], backref=backref('dataset_augmentation_job', lazy='subquery',", "sqlalchemy import Column, Integer, ForeignKey, Boolean, Float, Text from sqlalchemy.orm import relationship, backref", "**super().json(), **self.dataset.json() } @property def augmented_images_count(self) -> int: augmented_images_count = 0 if self.apply_random_erase:", "ForeignKey, Boolean, Float, Text from sqlalchemy.orm import relationship, backref from wb.main.enumerates import JobTypesEnum", "Intel Corporation Licensed under the Apache License, Version 2.0 (the \"License\"); you may", "distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR 
CONDITIONS OF ANY KIND,", "not use this file except in compliance with the License. You may obtain", "= 'dataset_augmentation_jobs' __mapper_args__ = { 'polymorphic_identity': JobTypesEnum.augment_dataset_type.value } job_id = Column(Integer, ForeignKey(JobsModel.job_id), primary_key=True)", "bool applyVerticalFlip: bool applyErase: bool eraseRatio: int eraseImages: int applyNoise: bool noiseRatio: int", "Corporation Licensed under the Apache License, Version 2.0 (the \"License\"); you may not", "} @property def augmented_images_count(self) -> int: augmented_images_count = 0 if self.apply_random_erase: augmented_images_count +=", "noiseImages: int applyImageCorrections: bool imageCorrections: str # pylint: disable=too-many-instance-attributes class DatasetAugmentationJobModel(JobsModel): __tablename__ =", "= augmentation_data['noiseRatio'] self.noise_images = augmentation_data['noiseImages'] self.apply_image_corrections = augmentation_data['applyImageCorrections'] self.image_corrections = json.dumps(augmentation_data['imageCorrections']) def json(self)", "0 if self.apply_random_erase: augmented_images_count += self.erase_images if self.apply_noise_injection: augmented_images_count += self.noise_images if self.horizontal_flip:", "wb.main.enumerates import JobTypesEnum from wb.main.models.datasets_model import DatasetsModel, DatasetJobData from wb.main.models.jobs_model import JobsModel class", "ANY KIND, either express or implied. See the License for the specific language", "file except in compliance with the License. You may obtain a copy of", "noiseRatio: int noiseImages: int applyImageCorrections: bool imageCorrections: str # pylint: disable=too-many-instance-attributes class DatasetAugmentationJobModel(JobsModel):", "augmentation_data['applyHorizontalFlip'] self.apply_noise_injection = augmentation_data['applyNoise'] self.apply_random_erase = augmentation_data['applyErase'] self.erase_images = augmentation_data['eraseImages'] self.erase_ratio = augmentation_data['eraseRatio']", "augmentation_data: DatasetAugmentationJobData): super().__init__(data) self.dataset_id = data['datasetId'] self.vertical_flip = augmentation_data['applyVerticalFlip'] self.horizontal_flip = augmentation_data['applyHorizontalFlip'] self.apply_noise_injection", "2.0 (the \"License\"); you may not use this file except in compliance with", "self.noise_images if self.horizontal_flip: augmented_images_count += 1 if self.vertical_flip: augmented_images_count += 1 if self.apply_image_corrections:", "self.vertical_flip = augmentation_data['applyVerticalFlip'] self.horizontal_flip = augmentation_data['applyHorizontalFlip'] self.apply_noise_injection = augmentation_data['applyNoise'] self.apply_random_erase = augmentation_data['applyErase'] self.erase_images", "copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed", "noise_images = Column(Integer, nullable=True) apply_image_corrections = Column(Boolean, nullable=False, default=False) image_corrections = Column(Text, nullable=True)", "the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless", "nullable=False, default=False) noise_ratio = Column(Float, nullable=True) noise_images = Column(Integer, nullable=True) apply_image_corrections = Column(Boolean,", "str # pylint: disable=too-many-instance-attributes class DatasetAugmentationJobModel(JobsModel): __tablename__ = 'dataset_augmentation_jobs' __mapper_args__ = { 'polymorphic_identity':", "\"\"\" import json try: from typing import TypedDict except ImportError: from typing_extensions import", "DL Workbench Class for ORM model describing dataset augmentation job Copyright (c) 2021", "job_id = Column(Integer, ForeignKey(JobsModel.job_id), primary_key=True) dataset_id = Column(Integer, ForeignKey(DatasetsModel.id), nullable=False) horizontal_flip = Column(Boolean,", "(the \"License\"); you may not use this file except in compliance with the", "self.apply_image_corrections = augmentation_data['applyImageCorrections'] self.image_corrections = json.dumps(augmentation_data['imageCorrections']) def json(self) -> dict: return { **super().json(),", "under the License. \"\"\" import json try: from typing import TypedDict except ImportError:", "nullable=True) dataset = relationship(DatasetsModel, foreign_keys=[dataset_id], backref=backref('dataset_augmentation_job', lazy='subquery', cascade='delete,all', uselist=False)) def __init__(self, data: DatasetJobData,", "backref from wb.main.enumerates import JobTypesEnum from wb.main.models.datasets_model import DatasetsModel, DatasetJobData from wb.main.models.jobs_model import", "augmentation_data['applyImageCorrections'] self.image_corrections = json.dumps(augmentation_data['imageCorrections']) def json(self) -> dict: return { **super().json(), **self.dataset.json() }", "bool eraseRatio: int eraseImages: int applyNoise: bool noiseRatio: int noiseImages: int applyImageCorrections: bool", "except ImportError: from typing_extensions import TypedDict from sqlalchemy import Column, Integer, ForeignKey, Boolean,", "foreign_keys=[dataset_id], backref=backref('dataset_augmentation_job', lazy='subquery', cascade='delete,all', uselist=False)) def __init__(self, data: DatasetJobData, augmentation_data: DatasetAugmentationJobData): super().__init__(data) self.dataset_id", "from typing import TypedDict except ImportError: from typing_extensions import TypedDict from sqlalchemy import", "erase_images = Column(Integer, nullable=True) apply_noise_injection = Column(Boolean, nullable=False, default=False) noise_ratio = Column(Float, nullable=True)", "http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed", "(c) 2021 Intel Corporation Licensed under the Apache License, Version 2.0 (the \"License\");", "License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "= augmentation_data['eraseRatio'] self.noise_ratio = augmentation_data['noiseRatio'] self.noise_images = augmentation_data['noiseImages'] self.apply_image_corrections = augmentation_data['applyImageCorrections'] self.image_corrections =", "DatasetJobData, augmentation_data: DatasetAugmentationJobData): super().__init__(data) self.dataset_id = data['datasetId'] self.vertical_flip = augmentation_data['applyVerticalFlip'] self.horizontal_flip = augmentation_data['applyHorizontalFlip']", "Column(Boolean, nullable=False, default=False) noise_ratio = Column(Float, nullable=True) noise_images = Column(Integer, nullable=True) apply_image_corrections =", "nullable=False, 
default=False) vertical_flip = Column(Boolean, nullable=False, default=False) apply_random_erase = Column(Boolean, nullable=False, default=False) erase_ratio", "augmentation_data['eraseImages'] self.erase_ratio = augmentation_data['eraseRatio'] self.noise_ratio = augmentation_data['noiseRatio'] self.noise_images = augmentation_data['noiseImages'] self.apply_image_corrections = augmentation_data['applyImageCorrections']", "= Column(Boolean, nullable=False, default=False) noise_ratio = Column(Float, nullable=True) noise_images = Column(Integer, nullable=True) apply_image_corrections", "law or agreed to in writing, software distributed under the License is distributed", "int eraseImages: int applyNoise: bool noiseRatio: int noiseImages: int applyImageCorrections: bool imageCorrections: str", "ImportError: from typing_extensions import TypedDict from sqlalchemy import Column, Integer, ForeignKey, Boolean, Float,", "Version 2.0 (the \"License\"); you may not use this file except in compliance", "dataset augmentation job Copyright (c) 2021 Intel Corporation Licensed under the Apache License,", "the Apache License, Version 2.0 (the \"License\"); you may not use this file", "def augmented_images_count(self) -> int: augmented_images_count = 0 if self.apply_random_erase: augmented_images_count += self.erase_images if", "DatasetAugmentationJobData): super().__init__(data) self.dataset_id = data['datasetId'] self.vertical_flip = augmentation_data['applyVerticalFlip'] self.horizontal_flip = augmentation_data['applyHorizontalFlip'] self.apply_noise_injection =", "Integer, ForeignKey, Boolean, Float, Text from sqlalchemy.orm import relationship, backref from wb.main.enumerates import", "from wb.main.enumerates import JobTypesEnum from wb.main.models.datasets_model import DatasetsModel, DatasetJobData from wb.main.models.jobs_model import JobsModel", "from wb.main.models.datasets_model import DatasetsModel, DatasetJobData from wb.main.models.jobs_model import JobsModel class DatasetAugmentationJobData(TypedDict): applyHorizontalFlip: bool", "under the Apache License, Version 2.0 (the \"License\"); you may not use this", "nullable=False, default=False) image_corrections = Column(Text, nullable=True) dataset = relationship(DatasetsModel, foreign_keys=[dataset_id], backref=backref('dataset_augmentation_job', lazy='subquery', cascade='delete,all',", "either express or implied. See the License for the specific language governing permissions", "self.dataset_id = data['datasetId'] self.vertical_flip = augmentation_data['applyVerticalFlip'] self.horizontal_flip = augmentation_data['applyHorizontalFlip'] self.apply_noise_injection = augmentation_data['applyNoise'] self.apply_random_erase", "nullable=True) noise_images = Column(Integer, nullable=True) apply_image_corrections = Column(Boolean, nullable=False, default=False) image_corrections = Column(Text,", "self.image_corrections = json.dumps(augmentation_data['imageCorrections']) def json(self) -> dict: return { **super().json(), **self.dataset.json() } @property", "super().__init__(data) self.dataset_id = data['datasetId'] self.vertical_flip = augmentation_data['applyVerticalFlip'] self.horizontal_flip = augmentation_data['applyHorizontalFlip'] self.apply_noise_injection = augmentation_data['applyNoise']", "Apache License, Version 2.0 (the \"License\"); you may not use this file except", "or implied. 
See the License for the specific language governing permissions and limitations", "vertical_flip = Column(Boolean, nullable=False, default=False) apply_random_erase = Column(Boolean, nullable=False, default=False) erase_ratio = Column(Float,", "= Column(Float, nullable=True) erase_images = Column(Integer, nullable=True) apply_noise_injection = Column(Boolean, nullable=False, default=False) noise_ratio", "DatasetAugmentationJobData(TypedDict): applyHorizontalFlip: bool applyVerticalFlip: bool applyErase: bool eraseRatio: int eraseImages: int applyNoise: bool", "job Copyright (c) 2021 Intel Corporation Licensed under the Apache License, Version 2.0", "2021 Intel Corporation Licensed under the Apache License, Version 2.0 (the \"License\"); you", "the specific language governing permissions and limitations under the License. \"\"\" import json", "Float, Text from sqlalchemy.orm import relationship, backref from wb.main.enumerates import JobTypesEnum from wb.main.models.datasets_model", "Column(Float, nullable=True) noise_images = Column(Integer, nullable=True) apply_image_corrections = Column(Boolean, nullable=False, default=False) image_corrections =", "Workbench Class for ORM model describing dataset augmentation job Copyright (c) 2021 Intel", "CONDITIONS OF ANY KIND, either express or implied. See the License for the", "DatasetAugmentationJobModel(JobsModel): __tablename__ = 'dataset_augmentation_jobs' __mapper_args__ = { 'polymorphic_identity': JobTypesEnum.augment_dataset_type.value } job_id = Column(Integer,", "+= self.erase_images if self.apply_noise_injection: augmented_images_count += self.noise_images if self.horizontal_flip: augmented_images_count += 1 if", "import TypedDict from sqlalchemy import Column, Integer, ForeignKey, Boolean, Float, Text from sqlalchemy.orm", "to in writing, software distributed under the License is distributed on an \"AS", "__mapper_args__ = { 'polymorphic_identity': JobTypesEnum.augment_dataset_type.value } job_id = Column(Integer, ForeignKey(JobsModel.job_id), primary_key=True) dataset_id =", "limitations under the License. \"\"\" import json try: from typing import TypedDict except", "import JobTypesEnum from wb.main.models.datasets_model import DatasetsModel, DatasetJobData from wb.main.models.jobs_model import JobsModel class DatasetAugmentationJobData(TypedDict):", "json try: from typing import TypedDict except ImportError: from typing_extensions import TypedDict from", "except in compliance with the License. 
You may obtain a copy of the", "erase_ratio = Column(Float, nullable=True) erase_images = Column(Integer, nullable=True) apply_noise_injection = Column(Boolean, nullable=False, default=False)", "int applyImageCorrections: bool imageCorrections: str # pylint: disable=too-many-instance-attributes class DatasetAugmentationJobModel(JobsModel): __tablename__ = 'dataset_augmentation_jobs'", "dataset = relationship(DatasetsModel, foreign_keys=[dataset_id], backref=backref('dataset_augmentation_job', lazy='subquery', cascade='delete,all', uselist=False)) def __init__(self, data: DatasetJobData, augmentation_data:", "applyImageCorrections: bool imageCorrections: str # pylint: disable=too-many-instance-attributes class DatasetAugmentationJobModel(JobsModel): __tablename__ = 'dataset_augmentation_jobs' __mapper_args__", "backref=backref('dataset_augmentation_job', lazy='subquery', cascade='delete,all', uselist=False)) def __init__(self, data: DatasetJobData, augmentation_data: DatasetAugmentationJobData): super().__init__(data) self.dataset_id =", "an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "Column(Boolean, nullable=False, default=False) apply_random_erase = Column(Boolean, nullable=False, default=False) erase_ratio = Column(Float, nullable=True) erase_images", "obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law", "horizontal_flip = Column(Boolean, nullable=False, default=False) vertical_flip = Column(Boolean, nullable=False, default=False) apply_random_erase = Column(Boolean,", "= augmentation_data['applyErase'] self.erase_images = augmentation_data['eraseImages'] self.erase_ratio = augmentation_data['eraseRatio'] self.noise_ratio = augmentation_data['noiseRatio'] self.noise_images =", "import TypedDict except ImportError: from typing_extensions import TypedDict from sqlalchemy import Column, Integer,", "augmented_images_count += 1 if self.vertical_flip: augmented_images_count += 1 if self.apply_image_corrections: augmented_images_count += len(self.image_corrections)", "License. \"\"\" import json try: from typing import TypedDict except ImportError: from typing_extensions", "wb.main.models.jobs_model import JobsModel class DatasetAugmentationJobData(TypedDict): applyHorizontalFlip: bool applyVerticalFlip: bool applyErase: bool eraseRatio: int", "apply_noise_injection = Column(Boolean, nullable=False, default=False) noise_ratio = Column(Float, nullable=True) noise_images = Column(Integer, nullable=True)", "1 if self.vertical_flip: augmented_images_count += 1 if self.apply_image_corrections: augmented_images_count += len(self.image_corrections) return augmented_images_count", "License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing,", "for ORM model describing dataset augmentation job Copyright (c) 2021 Intel Corporation Licensed", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "augmentation_data['noiseImages'] self.apply_image_corrections = augmentation_data['applyImageCorrections'] self.image_corrections = json.dumps(augmentation_data['imageCorrections']) def json(self) -> dict: return {", "permissions and limitations under the License. 
\"\"\" import json try: from typing import", "applyErase: bool eraseRatio: int eraseImages: int applyNoise: bool noiseRatio: int noiseImages: int applyImageCorrections:", "self.apply_noise_injection = augmentation_data['applyNoise'] self.apply_random_erase = augmentation_data['applyErase'] self.erase_images = augmentation_data['eraseImages'] self.erase_ratio = augmentation_data['eraseRatio'] self.noise_ratio", "model describing dataset augmentation job Copyright (c) 2021 Intel Corporation Licensed under the", "augmented_images_count += self.erase_images if self.apply_noise_injection: augmented_images_count += self.noise_images if self.horizontal_flip: augmented_images_count += 1", "data: DatasetJobData, augmentation_data: DatasetAugmentationJobData): super().__init__(data) self.dataset_id = data['datasetId'] self.vertical_flip = augmentation_data['applyVerticalFlip'] self.horizontal_flip =", "augmentation_data['eraseRatio'] self.noise_ratio = augmentation_data['noiseRatio'] self.noise_images = augmentation_data['noiseImages'] self.apply_image_corrections = augmentation_data['applyImageCorrections'] self.image_corrections = json.dumps(augmentation_data['imageCorrections'])", "= Column(Integer, ForeignKey(JobsModel.job_id), primary_key=True) dataset_id = Column(Integer, ForeignKey(DatasetsModel.id), nullable=False) horizontal_flip = Column(Boolean, nullable=False,", "Column(Integer, nullable=True) apply_image_corrections = Column(Boolean, nullable=False, default=False) image_corrections = Column(Text, nullable=True) dataset =", "compliance with the License. You may obtain a copy of the License at", "from sqlalchemy.orm import relationship, backref from wb.main.enumerates import JobTypesEnum from wb.main.models.datasets_model import DatasetsModel,", "\"\"\" OpenVINO DL Workbench Class for ORM model describing dataset augmentation job Copyright", "nullable=False, default=False) apply_random_erase = Column(Boolean, nullable=False, default=False) erase_ratio = Column(Float, nullable=True) erase_images =", "= Column(Text, nullable=True) dataset = relationship(DatasetsModel, foreign_keys=[dataset_id], backref=backref('dataset_augmentation_job', lazy='subquery', cascade='delete,all', uselist=False)) def __init__(self,", "dataset_id = Column(Integer, ForeignKey(DatasetsModel.id), nullable=False) horizontal_flip = Column(Boolean, nullable=False, default=False) vertical_flip = Column(Boolean,", "express or implied. See the License for the specific language governing permissions and", "= augmentation_data['noiseImages'] self.apply_image_corrections = augmentation_data['applyImageCorrections'] self.image_corrections = json.dumps(augmentation_data['imageCorrections']) def json(self) -> dict: return", "the License. 
\"\"\" import json try: from typing import TypedDict except ImportError: from", "lazy='subquery', cascade='delete,all', uselist=False)) def __init__(self, data: DatasetJobData, augmentation_data: DatasetAugmentationJobData): super().__init__(data) self.dataset_id = data['datasetId']", "nullable=False) horizontal_flip = Column(Boolean, nullable=False, default=False) vertical_flip = Column(Boolean, nullable=False, default=False) apply_random_erase =", "sqlalchemy.orm import relationship, backref from wb.main.enumerates import JobTypesEnum from wb.main.models.datasets_model import DatasetsModel, DatasetJobData", "bool applyErase: bool eraseRatio: int eraseImages: int applyNoise: bool noiseRatio: int noiseImages: int", "You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by", "nullable=False, default=False) erase_ratio = Column(Float, nullable=True) erase_images = Column(Integer, nullable=True) apply_noise_injection = Column(Boolean,", "import relationship, backref from wb.main.enumerates import JobTypesEnum from wb.main.models.datasets_model import DatasetsModel, DatasetJobData from", "applicable law or agreed to in writing, software distributed under the License is", "augmented_images_count += self.noise_images if self.horizontal_flip: augmented_images_count += 1 if self.vertical_flip: augmented_images_count += 1", "data['datasetId'] self.vertical_flip = augmentation_data['applyVerticalFlip'] self.horizontal_flip = augmentation_data['applyHorizontalFlip'] self.apply_noise_injection = augmentation_data['applyNoise'] self.apply_random_erase = augmentation_data['applyErase']", "Column(Boolean, nullable=False, default=False) vertical_flip = Column(Boolean, nullable=False, default=False) apply_random_erase = Column(Boolean, nullable=False, default=False)", "= 0 if self.apply_random_erase: augmented_images_count += self.erase_images if self.apply_noise_injection: augmented_images_count += self.noise_images if", "primary_key=True) dataset_id = Column(Integer, ForeignKey(DatasetsModel.id), nullable=False) horizontal_flip = Column(Boolean, nullable=False, default=False) vertical_flip =", "return { **super().json(), **self.dataset.json() } @property def augmented_images_count(self) -> int: augmented_images_count = 0", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See", "= Column(Boolean, nullable=False, default=False) erase_ratio = Column(Float, nullable=True) erase_images = Column(Integer, nullable=True) apply_noise_injection", "noise_ratio = Column(Float, nullable=True) noise_images = Column(Integer, nullable=True) apply_image_corrections = Column(Boolean, nullable=False, default=False)", "relationship, backref from wb.main.enumerates import JobTypesEnum from wb.main.models.datasets_model import DatasetsModel, DatasetJobData from wb.main.models.jobs_model", "__tablename__ = 'dataset_augmentation_jobs' __mapper_args__ = { 'polymorphic_identity': JobTypesEnum.augment_dataset_type.value } job_id = Column(Integer, ForeignKey(JobsModel.job_id),", "from wb.main.models.jobs_model import JobsModel class DatasetAugmentationJobData(TypedDict): applyHorizontalFlip: bool applyVerticalFlip: bool applyErase: bool eraseRatio:", "= data['datasetId'] self.vertical_flip = augmentation_data['applyVerticalFlip'] self.horizontal_flip = augmentation_data['applyHorizontalFlip'] self.apply_noise_injection = augmentation_data['applyNoise'] self.apply_random_erase =", "nullable=True) apply_noise_injection = Column(Boolean, nullable=False, default=False) noise_ratio = Column(Float, nullable=True) noise_images = Column(Integer,", "= Column(Integer, nullable=True) apply_image_corrections = Column(Boolean, nullable=False, default=False) image_corrections = Column(Text, nullable=True) dataset", "with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0", "applyVerticalFlip: bool applyErase: bool eraseRatio: int eraseImages: int applyNoise: bool noiseRatio: int noiseImages:", "at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software", "apply_random_erase = Column(Boolean, nullable=False, default=False) erase_ratio = Column(Float, nullable=True) erase_images = Column(Integer, nullable=True)" ]
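# Not part of the original model file: a minimal standalone sketch of the arithmetic
# behind the augmented_images_count property above. The property sums the configured
# erase/noise image counts (when the corresponding apply_* flag is set), adds 1 for
# each enabled flip, and adds len(image_corrections) (which the model stores as a
# JSON string). The helper name and example values below are hypothetical.
def count_augmented_images(apply_random_erase=False, erase_images=0,
                           apply_noise_injection=False, noise_images=0,
                           horizontal_flip=False, vertical_flip=False,
                           apply_image_corrections=False, image_corrections=()):
    total = 0
    if apply_random_erase:
        total += erase_images
    if apply_noise_injection:
        total += noise_images
    if horizontal_flip:
        total += 1
    if vertical_flip:
        total += 1
    if apply_image_corrections:
        total += len(image_corrections)
    return total

# Example: 3 erased variants, 2 noisy variants, and both flips -> 7 augmented images.
assert count_augmented_images(True, 3, True, 2, True, True) == 7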
<filename>tilenol/event.py
import logging

from zorro import Condition, gethub


log = logging.getLogger(__name__)


class Event(object):

    def __init__(self, name=None):
        self.name = name
        self._listeners = []
        self._worker = None

    def listen(self, fun):
        self._listeners.append(fun)

    def unlisten(self, fun):
        self._listeners.remove(fun)

    def emit(self):
        log.debug("Emitting event %r", self.name)
        if self._worker is None and self._listeners:
            self._worker = gethub().do_spawn(self._do_work)

    def _do_work(self):
        try:
            log.debug("Processing event %r", self.name)
            for l in self._listeners:
                l()
        finally:
            self._worker = None
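# Not from the tilenol project: an illustrative, self-contained analogue of the Event
# class above. Event.emit schedules at most one worker at a time through zorro's
# gethub().do_spawn(); the worker then calls every registered listener and clears
# itself, so bursts of emits are coalesced into one processing pass. The hypothetical
# SimpleEvent below shows the same pattern with the standard library instead of a
# zorro hub.
import threading

class SimpleEvent(object):
    def __init__(self, name=None):
        self.name = name
        self._listeners = []
        self._worker = None

    def listen(self, fun):
        self._listeners.append(fun)

    def unlisten(self, fun):
        self._listeners.remove(fun)

    def emit(self):
        # Only schedule a worker if none is already pending
        if self._worker is None and self._listeners:
            self._worker = threading.Thread(target=self._do_work)
            self._worker.start()

    def _do_work(self):
        try:
            for listener in self._listeners:
                listener()
        finally:
            self._worker = None

ev = SimpleEvent("demo")
ev.listen(lambda: print("event handled"))
ev.emit()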
import os, json, logging, glob, codecs, os, time, subprocess
from contextlib import contextmanager
import requests

logging.basicConfig(level=logging.INFO)
logging.getLogger("requests").setLevel(logging.WARNING)

FLAG_fork = True
FLAG_delete = True

fork_sleep_time = 10
clone_error_sleep_time = 60

# Verify that there is a token set as an env variable and load it
shell_token = "GITHUB_ORTHOGRAPHIC_TOKEN"
GITHUB_TOKEN = os.environ[shell_token]

login_params = {"access_token":GITHUB_TOKEN,}

API_URL = "https://api.github.com/repos"
fork_url = API_URL + "/{user_name}/{repo_name}/forks"
pulls_url = API_URL + "/{user_name}/{repo_name}/pulls"
delete_url = API_URL + "/{user_name}/{repo_name}"
push_url = "https://{bot_name}:{bot_password}@github.com/{bot_name}/{repo_name} {branch_name}:{branch_name}"
clone_url = "https://github.com/orthographic-pedant/{repo_name}"

# Load the PR text
with open("messages/pull_request.txt") as FIN:
    pull_request_msg = ' '.join(FIN.read().split())

with open("messages/commit_header.txt") as FIN:
    commit_header_msg = FIN.read().strip()

with open("messages/commit_text.txt") as FIN:
    commit_text_msg = FIN.read().strip()

def is_branch_different_from_default(repo):
    # Checks if any substantial commits have been made
    cmd = "git diff {master_branch} --".format(**repo)
    p = subprocess.check_output(cmd,shell=True).strip()
    # If any edits have been made this will return True
    return p

def pull_request_repo(repo):
    if not is_branch_different_from_default(repo):
        logging.info("No edits have been made, skipping!".format(**repo))
        return False

    logging.info("Creating pull request for {full_name}".format(**repo))

    data = {
        "head"  : "{bot_name}:{branch_name}".format(**repo),
        "base"  : repo["master_branch"],
        "title" : repo["commit_header"],
        "body"  : pull_request_msg.format(**repo),
    }

    url = pulls_url.format(**repo)
    r = requests.post(url,params=login_params,json=data)

    if "errors" in r.json():
        from pprint import pprint
        print pprint(r.json()["errors"])

    logging.info("Pull request status {}".format(r))
    return True

def fork_repo(repo):
    f_url = fork_url.format(**repo)
    r = requests.post(f_url,params=login_params)
    status = r.status_code
    logging.info("Creating fork, status {}".format(status))
    assert(status == 202)
    logging.info("Sleeping for {} seconds.".format(fork_sleep_time))
    time.sleep(fork_sleep_time)

def push_commits(repo):
    logging.info("Push new branch {bot_name}:{branch_name}".format(**repo))
    cmd = "git push -u " + push_url.format(**repo)
    os.system(cmd)

def clone_repo(repo):
    git_endpoint = clone_url.format(**repo)
    cmd = "git clone -q --single-branch --depth 1 " + git_endpoint
    if not os.path.exists(repo["repo_name"]):
        try:
            msg = u"Cloning repo {full_name}".format(**repo)
            logging.info(msg)
            subprocess.check_output(cmd,shell=True)
        except:
            msg = u"Cloning repo {full_name} again after sleep".format(**repo)
            logging.info(msg)
            time.sleep(clone_error_sleep_time)
            subprocess.check_output(cmd,shell=True)
    os.system(cmd)

def does_git_branch_exist(repo):
    # Checks if a branch already exists of a given name
    cmd = "git rev-parse -q --verify {branch_name}".format(**repo)
    try:
        p = subprocess.check_output(cmd,shell=True).strip()
    except subprocess.CalledProcessError:
        return False
    # Valid SHA1 hash will be forty characters long
    return len(p.strip()) == 40

def create_branch(repo):
    # Attempts to create the branch in repo["branch_name"]
    if not does_git_branch_exist(repo):
        logging.info("Creating new branch {branch_name}".format(**repo))
        cmd = "git checkout -b {branch_name}".format(**repo)
        os.system(cmd)

def delete_bot_repo(repo):
    url = API_URL + "/{bot_name}/{repo_name}".format(**repo)
    r = requests.delete(url,params=login_params)
    msg = "Deleted bot repo {repo_name}, status {}"
    logging.info(msg.format(r.status_code,**repo))

def fix_word(line,w1,w2):
    line = line.replace(w1.title(),w2.title())
    line = line.replace(w1,w2)
    line = line.replace(w1.lower(),w2.lower())
    line = line.replace(w1.upper(),w2.upper())
    return line

def fix_file(f, w1, w2):
    corrections = 0
    newlines = []
    with codecs.open(f,'r','utf-8') as FIN:
        for line in FIN:
            if w1.lower() in line.lower():
                logging.info("Fixing {}->{} in {}".format(w1,w2,f))
                line = fix_word(line,w1,w2)
                corrections += 1
            newlines.append(line)

    with codecs.open(f,'w','utf-8') as FOUT:
        FOUT.write(''.join(newlines))

    return corrections

@contextmanager
def enter_repo(repo):
    # Remember our original directory
    org_dir = os.getcwd()

    repo["bot_name"] = "orthographic-pedant"
    repo["bot_password"] = <PASSWORD>

    # Used so github can track the submissions...
    repo["bot_email"] = "travis.hoppe"+"+orthographicpendant"+"@"+"<EMAIL>"

    # Record the full name of the repo
    repo["full_name"] = "{user_name}:{repo_name}".format(**repo)
    logging.info("Entered {}".format(repo["full_name"]))

    if FLAG_fork:
        fork_repo(repo)

    # Create the directories
    os.system("mkdir -p forks")
    os.chdir("forks")

    clone_repo(repo)

    # Enter the repo directory
    os.chdir(repo["repo_name"])

    # Get the current branch name
    p = subprocess.check_output("git show-branch",shell=True)
    repo["master_branch"] = p.split(']')[0].split('[')[1]

    # Set the username
    cmd = 'git config user.name "{bot_name}"'.format(**repo)
    os.system(cmd)
    cmd = 'git config user.email "{bot_email}"'.format(**repo)
    os.system(cmd)

    yield

    logging.info("Exiting {}".format(repo["full_name"]))

    if FLAG_delete:
        delete_bot_repo(repo)

    os.chdir(org_dir)
    os.system("rm -rf forks")

def fix_repo(full_name, good_word, bad_word):
    full_name = full_name.strip()
    user_name, repo_name = full_name.split('/')

    repo = {
        "access_token" : GITHUB_TOKEN,
        "user_name"    : user_name,
        "repo_name"    : repo_name,
        "good_word"    : good_word,
        "bad_word"     : bad_word,
    }

    # Check if the user_name is a "bad_word", this is a false positive!
    if bad_word.lower() in user_name.lower():
        return False

    # Check if repo_name is a "bad_word", this is also a false positive!
    if bad_word.lower() in repo_name.lower():
        return False

    with enter_repo(repo):
        # Find READMES
        F_README = [x for x in glob.glob("*.*") if 'readme.' in x.lower()]

        repo["branch_name"] = "spell_check/{}".format(good_word)
        create_branch(repo)

        # Fix READMES
        total_corrections = 0
        for fr in F_README:
            try:
                correction_count = fix_file(fr, bad_word, good_word)
            except UnicodeDecodeError:
                # Skip the repo if the file is too funky for utf-8
                msg = "UnicodeDecode Error"
                logging.error(msg)
                return False
            total_corrections += correction_count

        logging.info("Fixed {} spelling mistakes".format(total_corrections))

        # Commit changes
        repo["commit_header"] = commit_header_msg.format(**repo)
        repo["commit_text"] = commit_text_msg.format(**repo)

        cmd = 'git commit -a -m "{commit_header}" -m "{commit_text}"'.format(**repo)
        os.system(cmd)

        # Push the changes to bot directory
        push_commits(repo)

        # Create pull request
        pull_status = pull_request_repo(repo)

    return pull_status

###############################################################

if __name__ == "__main__":

    # Target word
    bad_word = "Celcius"
    good_word = "Celsius"

    full_name = "thoppe/I-am-error"
    fix_repo(full_name, good_word, bad_word)
repo[\"bot_email\"] = \"travis.hoppe\"+\"+orthographicpendant\"+\"@\"+\"<EMAIL>\" # Record the full", "{}\".format(status)) assert(status == 202) logging.info(\"Sleeping for {} seconds.\".format(fork_sleep_time)) time.sleep(fork_sleep_time) def push_commits(repo): logging.info(\"Push new", "request pull_status = pull_request_repo(repo) return pull_status ############################################################### if __name__ == \"__main__\": # Target", "os.system(cmd) def delete_bot_repo(repo): url = API_URL + \"/{bot_name}/{repo_name}\".format(**repo) r = requests.delete(url,params=login_params) msg =", "\"travis.hoppe\"+\"+orthographicpendant\"+\"@\"+\"<EMAIL>\" # Record the full name of the repo repo[\"full_name\"] = \"{user_name}:{repo_name}\".format(**repo) logging.info(\"Entered", "open(\"messages/commit_text.txt\") as FIN: commit_text_msg = FIN.read().strip() def is_branch_different_from_default(repo): # Checks if any substantial", "is a token set as an env variable and load it shell_token =", "FIN: commit_header_msg = FIN.read().strip() with open(\"messages/commit_text.txt\") as FIN: commit_text_msg = FIN.read().strip() def is_branch_different_from_default(repo):", "the file is too funky for utf-8 msg = \"UnicodeDecode Error\" logging.error(msg) return", "time.sleep(fork_sleep_time) def push_commits(repo): logging.info(\"Push new branch {bot_name}:{branch_name}\".format(**repo)) cmd = \"git push -u \"", "\"repo_name\" : repo_name, \"good_word\" : good_word, \"bad_word\" : bad_word, } # Check if", "repo[\"full_name\"] = \"{user_name}:{repo_name}\".format(**repo) logging.info(\"Entered {}\".format(repo[\"full_name\"])) if FLAG_fork: fork_repo(repo) # Create the directories os.system(\"mkdir", "characters long return len(p.strip()) == 40 def create_branch(repo): # Attempts to create the", "the submissions... 
repo[\"bot_email\"] = \"travis.hoppe\"+\"+orthographicpendant\"+\"@\"+\"<EMAIL>\" # Record the full name of the repo", "READMES total_corrections = 0 for fr in F_README: try: correction_count = fix_file(fr, bad_word,", "f_url = fork_url.format(**repo) r = requests.post(f_url,params=login_params) status = r.status_code logging.info(\"Creating fork, status {}\".format(status))", "\"git push -u \" + push_url.format(**repo) os.system(cmd) def clone_repo(repo): git_endpoint = clone_url.format(**repo) cmd", "bad_word, good_word) except UnicodeDecodeError: # Skip the repo if the file is too", "= subprocess.check_output(cmd,shell=True).strip() except subprocess.CalledProcessError: return False # Valid SHA1 hash will be forty", "data = { \"head\" :\"{bot_name}:{branch_name}\".format(**repo), \"base\" : repo[\"master_branch\"], \"title\" : repo[\"commit_header\"], \"body\" :", "create the branch in repo[\"branch_name\"] if not does_git_branch_exist(repo): logging.info(\"Creating new branch {branch_name}\".format(**repo)) cmd", "logging.info(\"Entered {}\".format(repo[\"full_name\"])) if FLAG_fork: fork_repo(repo) # Create the directories os.system(\"mkdir -p forks\") os.chdir(\"forks\")", "{branch_name}:{branch_name}\" clone_url = \"https://github.com/orthographic-pedant/{repo_name}\" # Load the PR text with open(\"messages/pull_request.txt\") as FIN:", "# Get the current branch name p = subprocess.check_output(\"git show-branch\",shell=True) repo[\"master_branch\"] = p.split(']')[0].split('[')[1]", "\"Deleted bot repo {repo_name}, status {}\" logging.info(msg.format(r.status_code,**repo)) def fix_word(line,w1,w2): line = line.replace(w1.title(),w2.title()) line", "for line in FIN: if w1.lower() in line.lower(): logging.info(\"Fixing {}->{} in {}\".format(w1,w2,f)) line", "if not os.path.exists(repo[\"repo_name\"]): try: msg = u\"Cloning repo {full_name}\".format(**repo) logging.info(msg) subprocess.check_output(cmd,shell=True) except: msg", "if the user_name is a \"bad_word\", this is a false positive! if bad_word.lower()", "os, time, subprocess from contextlib import contextmanager import requests logging.basicConfig(level=logging.INFO) logging.getLogger(\"requests\").setLevel(logging.WARNING) FLAG_fork =", "} url = pulls_url.format(**repo) r = requests.post(url,params=login_params,json=data) if \"errors\" in r.json(): from pprint", "delete_url = API_URL + \"/{user_name}/{repo_name}\" push_url = \"https://{bot_name}:{bot_password}@github.com/{bot_name}/{repo_name} {branch_name}:{branch_name}\" clone_url = \"https://github.com/orthographic-pedant/{repo_name}\" #", "{master_branch} --\".format(**repo) p = subprocess.check_output(cmd,shell=True).strip() # If any edits have been made this", "return True def fork_repo(repo): f_url = fork_url.format(**repo) r = requests.post(f_url,params=login_params) status = r.status_code", "for x in glob.glob(\"*.*\") if 'readme.' 
in x.lower()] repo[\"branch_name\"] = \"spell_check/{}\".format(good_word) create_branch(repo) #", "line = line.replace(w1,w2) line = line.replace(w1.lower(),w2.lower()) line = line.replace(w1.upper(),w2.upper()) return line def fix_file(f,", "SHA1 hash will be forty characters long return len(p.strip()) == 40 def create_branch(repo):", "url = API_URL + \"/{bot_name}/{repo_name}\".format(**repo) r = requests.delete(url,params=login_params) msg = \"Deleted bot repo", "= os.getcwd() repo[\"bot_name\"] = \"orthographic-pedant\" repo[\"bot_password\"] = <PASSWORD> # Used so github can", "cmd = 'git commit -a -m \"{commit_header}\" -m \"{commit_text}\"'.format(**repo) os.system(cmd) # Push the", "UnicodeDecodeError: # Skip the repo if the file is too funky for utf-8", "FOUT: FOUT.write(''.join(newlines)) return corrections @contextmanager def enter_repo(repo): # Remember our original directory org_dir", "= { \"access_token\" : GITHUB_TOKEN, \"user_name\" : user_name, \"repo_name\" : repo_name, \"good_word\" :", "# Create the directories os.system(\"mkdir -p forks\") os.chdir(\"forks\") clone_repo(repo) # Enter the repo", "enter_repo(repo): # Remember our original directory org_dir = os.getcwd() repo[\"bot_name\"] = \"orthographic-pedant\" repo[\"bot_password\"]", "\"{bot_name}\"'.format(**repo) os.system(cmd) cmd = 'git config user.email \"{bot_email}\"'.format(**repo) os.system(cmd) yield logging.info(\"Exiting {}\".format(repo[\"full_name\"])) if", "total_corrections += correction_count logging.info(\"Fixed {} spelling mistakes\".format(total_corrections)) # Commit changes repo[\"commit_header\"] = commit_header_msg.format(**repo)", "given name cmd = \"git rev-parse -q --verify {branch_name}\".format(**repo) try: p = subprocess.check_output(cmd,shell=True).strip()", "forks\") os.chdir(\"forks\") clone_repo(repo) # Enter the repo directory os.chdir(repo[\"repo_name\"]) # Get the current", "' '.join(FIN.read().split()) with open(\"messages/commit_header.txt\") as FIN: commit_header_msg = FIN.read().strip() with open(\"messages/commit_text.txt\") as FIN:", "subprocess.check_output(\"git show-branch\",shell=True) repo[\"master_branch\"] = p.split(']')[0].split('[')[1] # Set the username cmd = 'git config", "logging.getLogger(\"requests\").setLevel(logging.WARNING) FLAG_fork = True FLAG_delete = True fork_sleep_time = 10 clone_error_sleep_time = 60", "repo[\"bot_email\"] = \"travis.hoppe\"+\"+orthographicpendant\"+\"@\"+\"<EMAIL>\" # Record the full name of the repo repo[\"full_name\"] =", "# Fix READMES total_corrections = 0 for fr in F_README: try: correction_count =", "return False logging.info(\"Creating pull request for {full_name}\".format(**repo)) data = { \"head\" :\"{bot_name}:{branch_name}\".format(**repo), \"base\"", "commit_header_msg.format(**repo) repo[\"commit_text\"] = commit_text_msg.format(**repo) cmd = 'git commit -a -m \"{commit_header}\" -m \"{commit_text}\"'.format(**repo)", "# Attempts to create the branch in repo[\"branch_name\"] if not does_git_branch_exist(repo): logging.info(\"Creating new", "1 newlines.append(line) with codecs.open(f,'w','utf-8') as FOUT: FOUT.write(''.join(newlines)) return corrections @contextmanager def enter_repo(repo): #", "branch already exists of a given name cmd = \"git rev-parse -q --verify", "if __name__ == \"__main__\": # Target word bad_word = \"Celcius\" good_word = \"Celsius\"", "if not is_branch_different_from_default(repo): logging.info(\"No edits have been made, skipping!\".format(**repo)) return False logging.info(\"Creating pull", "= 
fix_word(line,w1,w2) corrections += 1 newlines.append(line) with codecs.open(f,'w','utf-8') as FOUT: FOUT.write(''.join(newlines)) return corrections", "user.email \"{bot_email}\"'.format(**repo) os.system(cmd) yield logging.info(\"Exiting {}\".format(repo[\"full_name\"])) if FLAG_delete: delete_bot_repo(repo) os.chdir(org_dir) os.system(\"rm -rf forks\")", "clone_url = \"https://github.com/orthographic-pedant/{repo_name}\" # Load the PR text with open(\"messages/pull_request.txt\") as FIN: pull_request_msg", "\"git rev-parse -q --verify {branch_name}\".format(**repo) try: p = subprocess.check_output(cmd,shell=True).strip() except subprocess.CalledProcessError: return False", "msg = u\"Cloning repo {full_name}\".format(**repo) logging.info(msg) subprocess.check_output(cmd,shell=True) except: msg = u\"Cloning repo {full_name}", "def pull_request_repo(repo): if not is_branch_different_from_default(repo): logging.info(\"No edits have been made, skipping!\".format(**repo)) return False", "config user.name \"{bot_name}\"'.format(**repo) os.system(cmd) cmd = 'git config user.email \"{bot_email}\"'.format(**repo) os.system(cmd) yield logging.info(\"Exiting", "Fix READMES total_corrections = 0 for fr in F_README: try: correction_count = fix_file(fr,", "FLAG_fork = True FLAG_delete = True fork_sleep_time = 10 clone_error_sleep_time = 60 #", "+ \"/{user_name}/{repo_name}/pulls\" delete_url = API_URL + \"/{user_name}/{repo_name}\" push_url = \"https://{bot_name}:{bot_password}@github.com/{bot_name}/{repo_name} {branch_name}:{branch_name}\" clone_url =", "\"git clone -q --single-branch --depth 1 \" + git_endpoint if not os.path.exists(repo[\"repo_name\"]): try:", "full_name.split('/') repo = { \"access_token\" : GITHUB_TOKEN, \"user_name\" : user_name, \"repo_name\" : repo_name,", "# Load the PR text with open(\"messages/pull_request.txt\") as FIN: pull_request_msg = ' '.join(FIN.read().split())", "r.status_code logging.info(\"Creating fork, status {}\".format(status)) assert(status == 202) logging.info(\"Sleeping for {} seconds.\".format(fork_sleep_time)) time.sleep(fork_sleep_time)", "text with open(\"messages/pull_request.txt\") as FIN: pull_request_msg = ' '.join(FIN.read().split()) with open(\"messages/commit_header.txt\") as FIN:", "new branch {branch_name}\".format(**repo)) cmd = \"git checkout -b {branch_name}\".format(**repo) os.system(cmd) def delete_bot_repo(repo): url", "assert(status == 202) logging.info(\"Sleeping for {} seconds.\".format(fork_sleep_time)) time.sleep(fork_sleep_time) def push_commits(repo): logging.info(\"Push new branch", "def fix_word(line,w1,w2): line = line.replace(w1.title(),w2.title()) line = line.replace(w1,w2) line = line.replace(w1.lower(),w2.lower()) line =", "clone_repo(repo): git_endpoint = clone_url.format(**repo) cmd = \"git clone -q --single-branch --depth 1 \"", "{}->{} in {}\".format(w1,w2,f)) line = fix_word(line,w1,w2) corrections += 1 newlines.append(line) with codecs.open(f,'w','utf-8') as", "'readme.' 
in x.lower()] repo[\"branch_name\"] = \"spell_check/{}\".format(good_word) create_branch(repo) # Fix READMES total_corrections = 0", "logging.info(\"Exiting {}\".format(repo[\"full_name\"])) if FLAG_delete: delete_bot_repo(repo) os.chdir(org_dir) os.system(\"rm -rf forks\") def fix_repo(full_name, good_word, bad_word):", "branch {bot_name}:{branch_name}\".format(**repo)) cmd = \"git push -u \" + push_url.format(**repo) os.system(cmd) def clone_repo(repo):", "time.sleep(clone_error_sleep_time) subprocess.check_output(cmd,shell=True) os.system(cmd) def does_git_branch_exist(repo): # Checks if a branch already exists of", "spelling mistakes\".format(total_corrections)) # Commit changes repo[\"commit_header\"] = commit_header_msg.format(**repo) repo[\"commit_text\"] = commit_text_msg.format(**repo) cmd =", "does_git_branch_exist(repo): # Checks if a branch already exists of a given name cmd", "requests.delete(url,params=login_params) msg = \"Deleted bot repo {repo_name}, status {}\" logging.info(msg.format(r.status_code,**repo)) def fix_word(line,w1,w2): line", "== 202) logging.info(\"Sleeping for {} seconds.\".format(fork_sleep_time)) time.sleep(fork_sleep_time) def push_commits(repo): logging.info(\"Push new branch {bot_name}:{branch_name}\".format(**repo))", "# Set the username cmd = 'git config user.name \"{bot_name}\"'.format(**repo) os.system(cmd) cmd =", "r.json(): from pprint import pprint print pprint(r.json()[\"errors\"]) logging.info(\"Pull request status {}\".format(r)) return True", "-q --single-branch --depth 1 \" + git_endpoint if not os.path.exists(repo[\"repo_name\"]): try: msg =", "org_dir = os.getcwd() repo[\"bot_name\"] = \"orthographic-pedant\" repo[\"bot_password\"] = <PASSWORD> # Used so github", "in F_README: try: correction_count = fix_file(fr, bad_word, good_word) except UnicodeDecodeError: # Skip the", "have been made, skipping!\".format(**repo)) return False logging.info(\"Creating pull request for {full_name}\".format(**repo)) data =", ": bad_word, } # Check if the user_name is a \"bad_word\", this is", "return len(p.strip()) == 40 def create_branch(repo): # Attempts to create the branch in", "codecs.open(f,'w','utf-8') as FOUT: FOUT.write(''.join(newlines)) return corrections @contextmanager def enter_repo(repo): # Remember our original", "pull_status = pull_request_repo(repo) return pull_status ############################################################### if __name__ == \"__main__\": # Target word", "import contextmanager import requests logging.basicConfig(level=logging.INFO) logging.getLogger(\"requests\").setLevel(logging.WARNING) FLAG_fork = True FLAG_delete = True fork_sleep_time", "--verify {branch_name}\".format(**repo) try: p = subprocess.check_output(cmd,shell=True).strip() except subprocess.CalledProcessError: return False # Valid SHA1", "fr in F_README: try: correction_count = fix_file(fr, bad_word, good_word) except UnicodeDecodeError: # Skip", "# Checks if any substantial commits have been made cmd = \"git diff", "{full_name}\".format(**repo) logging.info(msg) subprocess.check_output(cmd,shell=True) except: msg = u\"Cloning repo {full_name} again after sleep\".format(**repo) logging.info(msg)", "\"/{user_name}/{repo_name}/forks\" pulls_url = API_URL + \"/{user_name}/{repo_name}/pulls\" delete_url = API_URL + \"/{user_name}/{repo_name}\" push_url =", "'git commit -a -m \"{commit_header}\" -m \"{commit_text}\"'.format(**repo) os.system(cmd) # Push the changes to", "# Valid SHA1 hash will be forty characters long return len(p.strip()) == 40", "Check 
if the user_name is a \"bad_word\", this is a false positive! if", "this will return True return p def pull_request_repo(repo): if not is_branch_different_from_default(repo): logging.info(\"No edits", "= True fork_sleep_time = 10 clone_error_sleep_time = 60 # Verify that there is", "x in glob.glob(\"*.*\") if 'readme.' in x.lower()] repo[\"branch_name\"] = \"spell_check/{}\".format(good_word) create_branch(repo) # Fix", "os.system(\"mkdir -p forks\") os.chdir(\"forks\") clone_repo(repo) # Enter the repo directory os.chdir(repo[\"repo_name\"]) # Get", "push_commits(repo): logging.info(\"Push new branch {bot_name}:{branch_name}\".format(**repo)) cmd = \"git push -u \" + push_url.format(**repo)", "not does_git_branch_exist(repo): logging.info(\"Creating new branch {branch_name}\".format(**repo)) cmd = \"git checkout -b {branch_name}\".format(**repo) os.system(cmd)", "checkout -b {branch_name}\".format(**repo) os.system(cmd) def delete_bot_repo(repo): url = API_URL + \"/{bot_name}/{repo_name}\".format(**repo) r =", "again after sleep\".format(**repo) logging.info(msg) time.sleep(clone_error_sleep_time) subprocess.check_output(cmd,shell=True) os.system(cmd) def does_git_branch_exist(repo): # Checks if a", "+= correction_count logging.info(\"Fixed {} spelling mistakes\".format(total_corrections)) # Commit changes repo[\"commit_header\"] = commit_header_msg.format(**repo) repo[\"commit_text\"]", "} # Check if the user_name is a \"bad_word\", this is a false", "False logging.info(\"Creating pull request for {full_name}\".format(**repo)) data = { \"head\" :\"{bot_name}:{branch_name}\".format(**repo), \"base\" :", "def fork_repo(repo): f_url = fork_url.format(**repo) r = requests.post(f_url,params=login_params) status = r.status_code logging.info(\"Creating fork,", "{full_name} again after sleep\".format(**repo) logging.info(msg) time.sleep(clone_error_sleep_time) subprocess.check_output(cmd,shell=True) os.system(cmd) def does_git_branch_exist(repo): # Checks if", "of the repo repo[\"full_name\"] = \"{user_name}:{repo_name}\".format(**repo) logging.info(\"Entered {}\".format(repo[\"full_name\"])) if FLAG_fork: fork_repo(repo) # Create", "variable and load it shell_token = \"GITHUB_ORTHOGRAPHIC_TOKEN\" GITHUB_TOKEN = os.environ[shell_token] login_params = {\"access_token\":GITHUB_TOKEN,}", "full name of the repo repo[\"full_name\"] = \"{user_name}:{repo_name}\".format(**repo) logging.info(\"Entered {}\".format(repo[\"full_name\"])) if FLAG_fork: fork_repo(repo)", "line = fix_word(line,w1,w2) corrections += 1 newlines.append(line) with codecs.open(f,'w','utf-8') as FOUT: FOUT.write(''.join(newlines)) return", "= os.environ[shell_token] login_params = {\"access_token\":GITHUB_TOKEN,} API_URL = \"https://api.github.com/repos\" fork_url = API_URL + \"/{user_name}/{repo_name}/forks\"", "fork, status {}\".format(status)) assert(status == 202) logging.info(\"Sleeping for {} seconds.\".format(fork_sleep_time)) time.sleep(fork_sleep_time) def push_commits(repo):", "logging.info(msg) time.sleep(clone_error_sleep_time) subprocess.check_output(cmd,shell=True) os.system(cmd) def does_git_branch_exist(repo): # Checks if a branch already exists", "repo[\"bot_name\"] = \"orthographic-pedant\" repo[\"bot_password\"] = <PASSWORD> # Used so github can track the", "10 clone_error_sleep_time = 60 # Verify that there is a token set as", "with enter_repo(repo): # Find READMES F_README = [x for x in glob.glob(\"*.*\") if", "show-branch\",shell=True) repo[\"master_branch\"] = p.split(']')[0].split('[')[1] # Set the username 
cmd = 'git config user.name", "= FIN.read().strip() def is_branch_different_from_default(repo): # Checks if any substantial commits have been made", "logging.info(\"Fixed {} spelling mistakes\".format(total_corrections)) # Commit changes repo[\"commit_header\"] = commit_header_msg.format(**repo) repo[\"commit_text\"] = commit_text_msg.format(**repo)", "+= 1 newlines.append(line) with codecs.open(f,'w','utf-8') as FOUT: FOUT.write(''.join(newlines)) return corrections @contextmanager def enter_repo(repo):", "1 \" + git_endpoint if not os.path.exists(repo[\"repo_name\"]): try: msg = u\"Cloning repo {full_name}\".format(**repo)", "repo[\"commit_header\"], \"body\" : pull_request_msg.format(**repo), } url = pulls_url.format(**repo) r = requests.post(url,params=login_params,json=data) if \"errors\"", "not os.path.exists(repo[\"repo_name\"]): try: msg = u\"Cloning repo {full_name}\".format(**repo) logging.info(msg) subprocess.check_output(cmd,shell=True) except: msg =", "this is also a false positive! if bad_word.lower() in repo_name.lower(): return False with", "the repo if the file is too funky for utf-8 msg = \"UnicodeDecode", "current branch name p = subprocess.check_output(\"git show-branch\",shell=True) repo[\"master_branch\"] = p.split(']')[0].split('[')[1] # Set the", "a given name cmd = \"git rev-parse -q --verify {branch_name}\".format(**repo) try: p =", "in FIN: if w1.lower() in line.lower(): logging.info(\"Fixing {}->{} in {}\".format(w1,w2,f)) line = fix_word(line,w1,w2)", "try: correction_count = fix_file(fr, bad_word, good_word) except UnicodeDecodeError: # Skip the repo if", "repo[\"master_branch\"], \"title\" : repo[\"commit_header\"], \"body\" : pull_request_msg.format(**repo), } url = pulls_url.format(**repo) r =", "good_word, bad_word): full_name = full_name.strip() user_name, repo_name = full_name.split('/') repo = { \"access_token\"", "== \"__main__\": # Target word bad_word = \"Celcius\" good_word = \"Celsius\" full_name =", "GITHUB_TOKEN = os.environ[shell_token] login_params = {\"access_token\":GITHUB_TOKEN,} API_URL = \"https://api.github.com/repos\" fork_url = API_URL +", "repo {repo_name}, status {}\" logging.info(msg.format(r.status_code,**repo)) def fix_word(line,w1,w2): line = line.replace(w1.title(),w2.title()) line = line.replace(w1,w2)", "+ push_url.format(**repo) os.system(cmd) def clone_repo(repo): git_endpoint = clone_url.format(**repo) cmd = \"git clone -q", "# Enter the repo directory os.chdir(repo[\"repo_name\"]) # Get the current branch name p", "Create the directories os.system(\"mkdir -p forks\") os.chdir(\"forks\") clone_repo(repo) # Enter the repo directory", "= pulls_url.format(**repo) r = requests.post(url,params=login_params,json=data) if \"errors\" in r.json(): from pprint import pprint", "subprocess.check_output(cmd,shell=True).strip() except subprocess.CalledProcessError: return False # Valid SHA1 hash will be forty characters", "return p def pull_request_repo(repo): if not is_branch_different_from_default(repo): logging.info(\"No edits have been made, skipping!\".format(**repo))", "def enter_repo(repo): # Remember our original directory org_dir = os.getcwd() repo[\"bot_name\"] = \"orthographic-pedant\"", "and load it shell_token = \"GITHUB_ORTHOGRAPHIC_TOKEN\" GITHUB_TOKEN = os.environ[shell_token] login_params = {\"access_token\":GITHUB_TOKEN,} API_URL", "fork_sleep_time = 10 clone_error_sleep_time = 60 # Verify that there is a token", "\"errors\" in r.json(): from pprint import pprint print pprint(r.json()[\"errors\"]) logging.info(\"Pull 
request status {}\".format(r))", "= \"Deleted bot repo {repo_name}, status {}\" logging.info(msg.format(r.status_code,**repo)) def fix_word(line,w1,w2): line = line.replace(w1.title(),w2.title())", "pulls_url = API_URL + \"/{user_name}/{repo_name}/pulls\" delete_url = API_URL + \"/{user_name}/{repo_name}\" push_url = \"https://{bot_name}:{bot_password}@github.com/{bot_name}/{repo_name}", "contextlib import contextmanager import requests logging.basicConfig(level=logging.INFO) logging.getLogger(\"requests\").setLevel(logging.WARNING) FLAG_fork = True FLAG_delete = True", "fix_repo(full_name, good_word, bad_word): full_name = full_name.strip() user_name, repo_name = full_name.split('/') repo = {", "env variable and load it shell_token = \"GITHUB_ORTHOGRAPHIC_TOKEN\" GITHUB_TOKEN = os.environ[shell_token] login_params =", "If any edits have been made this will return True return p def", "name p = subprocess.check_output(\"git show-branch\",shell=True) repo[\"master_branch\"] = p.split(']')[0].split('[')[1] # Set the username cmd", "True FLAG_delete = True fork_sleep_time = 10 clone_error_sleep_time = 60 # Verify that", "fix_word(line,w1,w2): line = line.replace(w1.title(),w2.title()) line = line.replace(w1,w2) line = line.replace(w1.lower(),w2.lower()) line = line.replace(w1.upper(),w2.upper())", "the repo repo[\"full_name\"] = \"{user_name}:{repo_name}\".format(**repo) logging.info(\"Entered {}\".format(repo[\"full_name\"])) if FLAG_fork: fork_repo(repo) # Create the", "with codecs.open(f,'w','utf-8') as FOUT: FOUT.write(''.join(newlines)) return corrections @contextmanager def enter_repo(repo): # Remember our", "= \"{user_name}:{repo_name}\".format(**repo) logging.info(\"Entered {}\".format(repo[\"full_name\"])) if FLAG_fork: fork_repo(repo) # Create the directories os.system(\"mkdir -p", "'.join(FIN.read().split()) with open(\"messages/commit_header.txt\") as FIN: commit_header_msg = FIN.read().strip() with open(\"messages/commit_text.txt\") as FIN: commit_text_msg", "= full_name.split('/') repo = { \"access_token\" : GITHUB_TOKEN, \"user_name\" : user_name, \"repo_name\" :", "= API_URL + \"/{user_name}/{repo_name}\" push_url = \"https://{bot_name}:{bot_password}@github.com/{bot_name}/{repo_name} {branch_name}:{branch_name}\" clone_url = \"https://github.com/orthographic-pedant/{repo_name}\" # Load", "-rf forks\") def fix_repo(full_name, good_word, bad_word): full_name = full_name.strip() user_name, repo_name = full_name.split('/')", "{ \"head\" :\"{bot_name}:{branch_name}\".format(**repo), \"base\" : repo[\"master_branch\"], \"title\" : repo[\"commit_header\"], \"body\" : pull_request_msg.format(**repo), }", "line.replace(w1.upper(),w2.upper()) return line def fix_file(f, w1, w2): corrections = 0 newlines = []", "repo directory os.chdir(repo[\"repo_name\"]) # Get the current branch name p = subprocess.check_output(\"git show-branch\",shell=True)", "\" + push_url.format(**repo) os.system(cmd) def clone_repo(repo): git_endpoint = clone_url.format(**repo) cmd = \"git clone", "\"spell_check/{}\".format(good_word) create_branch(repo) # Fix READMES total_corrections = 0 for fr in F_README: try:", "# Record the full name of the repo repo[\"full_name\"] = \"{user_name}:{repo_name}\".format(**repo) logging.info(\"Entered {}\".format(repo[\"full_name\"]))", "print pprint(r.json()[\"errors\"]) logging.info(\"Pull request status {}\".format(r)) return True def fork_repo(repo): f_url = fork_url.format(**repo)", "# Check if repo_name is a \"bad_word\", this is also a false positive!", "pull_status 
############################################################### if __name__ == \"__main__\": # Target word bad_word = \"Celcius\" good_word", "os.system(cmd) def clone_repo(repo): git_endpoint = clone_url.format(**repo) cmd = \"git clone -q --single-branch --depth", "try: p = subprocess.check_output(cmd,shell=True).strip() except subprocess.CalledProcessError: return False # Valid SHA1 hash will", "in user_name.lower(): return False # Check if repo_name is a \"bad_word\", this is", "a \"bad_word\", this is also a false positive! if bad_word.lower() in repo_name.lower(): return", "FLAG_delete = True fork_sleep_time = 10 clone_error_sleep_time = 60 # Verify that there", "\"__main__\": # Target word bad_word = \"Celcius\" good_word = \"Celsius\" full_name = \"thoppe/I-am-error\"", "= <PASSWORD> # Used so github can track the submissions... repo[\"bot_email\"] = \"travis.hoppe\"+\"+orthographicpendant\"+\"@\"+\"<EMAIL>\"", "API_URL + \"/{user_name}/{repo_name}\" push_url = \"https://{bot_name}:{bot_password}@github.com/{bot_name}/{repo_name} {branch_name}:{branch_name}\" clone_url = \"https://github.com/orthographic-pedant/{repo_name}\" # Load the", "Get the current branch name p = subprocess.check_output(\"git show-branch\",shell=True) repo[\"master_branch\"] = p.split(']')[0].split('[')[1] #", "= 0 newlines = [] with codecs.open(f,'r','utf-8') as FIN: for line in FIN:", "positive! if bad_word.lower() in user_name.lower(): return False # Check if repo_name is a", "True fork_sleep_time = 10 clone_error_sleep_time = 60 # Verify that there is a", "if \"errors\" in r.json(): from pprint import pprint print pprint(r.json()[\"errors\"]) logging.info(\"Pull request status", "repo {full_name}\".format(**repo) logging.info(msg) subprocess.check_output(cmd,shell=True) except: msg = u\"Cloning repo {full_name} again after sleep\".format(**repo)", "-p forks\") os.chdir(\"forks\") clone_repo(repo) # Enter the repo directory os.chdir(repo[\"repo_name\"]) # Get the", "for fr in F_README: try: correction_count = fix_file(fr, bad_word, good_word) except UnicodeDecodeError: #", "logging.basicConfig(level=logging.INFO) logging.getLogger(\"requests\").setLevel(logging.WARNING) FLAG_fork = True FLAG_delete = True fork_sleep_time = 10 clone_error_sleep_time =", "commit_text_msg = FIN.read().strip() def is_branch_different_from_default(repo): # Checks if any substantial commits have been", "cmd = \"git checkout -b {branch_name}\".format(**repo) os.system(cmd) def delete_bot_repo(repo): url = API_URL +", "delete_bot_repo(repo) os.chdir(org_dir) os.system(\"rm -rf forks\") def fix_repo(full_name, good_word, bad_word): full_name = full_name.strip() user_name,", "a branch already exists of a given name cmd = \"git rev-parse -q", "def push_commits(repo): logging.info(\"Push new branch {bot_name}:{branch_name}\".format(**repo)) cmd = \"git push -u \" +", "Record the full name of the repo repo[\"full_name\"] = \"{user_name}:{repo_name}\".format(**repo) logging.info(\"Entered {}\".format(repo[\"full_name\"])) if", "commit_text_msg.format(**repo) cmd = 'git commit -a -m \"{commit_header}\" -m \"{commit_text}\"'.format(**repo) os.system(cmd) # Push", "API_URL + \"/{user_name}/{repo_name}/forks\" pulls_url = API_URL + \"/{user_name}/{repo_name}/pulls\" delete_url = API_URL + \"/{user_name}/{repo_name}\"", "time, subprocess from contextlib import contextmanager import requests logging.basicConfig(level=logging.INFO) logging.getLogger(\"requests\").setLevel(logging.WARNING) FLAG_fork = True", "line = 
line.replace(w1.upper(),w2.upper()) return line def fix_file(f, w1, w2): corrections = 0 newlines", "login_params = {\"access_token\":GITHUB_TOKEN,} API_URL = \"https://api.github.com/repos\" fork_url = API_URL + \"/{user_name}/{repo_name}/forks\" pulls_url =", "0 newlines = [] with codecs.open(f,'r','utf-8') as FIN: for line in FIN: if", "the changes to bot directory push_commits(repo) # Create pull request pull_status = pull_request_repo(repo)", "bad_word.lower() in user_name.lower(): return False # Check if repo_name is a \"bad_word\", this", "already exists of a given name cmd = \"git rev-parse -q --verify {branch_name}\".format(**repo)", "= requests.post(f_url,params=login_params) status = r.status_code logging.info(\"Creating fork, status {}\".format(status)) assert(status == 202) logging.info(\"Sleeping", "newlines = [] with codecs.open(f,'r','utf-8') as FIN: for line in FIN: if w1.lower()", "= full_name.strip() user_name, repo_name = full_name.split('/') repo = { \"access_token\" : GITHUB_TOKEN, \"user_name\"", "os.system(cmd) cmd = 'git config user.email \"{bot_email}\"'.format(**repo) os.system(cmd) yield logging.info(\"Exiting {}\".format(repo[\"full_name\"])) if FLAG_delete:", "if 'readme.' in x.lower()] repo[\"branch_name\"] = \"spell_check/{}\".format(good_word) create_branch(repo) # Fix READMES total_corrections =", "for {full_name}\".format(**repo)) data = { \"head\" :\"{bot_name}:{branch_name}\".format(**repo), \"base\" : repo[\"master_branch\"], \"title\" : repo[\"commit_header\"],", "corrections += 1 newlines.append(line) with codecs.open(f,'w','utf-8') as FOUT: FOUT.write(''.join(newlines)) return corrections @contextmanager def", "= requests.post(url,params=login_params,json=data) if \"errors\" in r.json(): from pprint import pprint print pprint(r.json()[\"errors\"]) logging.info(\"Pull", "is also a false positive! 
if bad_word.lower() in repo_name.lower(): return False with enter_repo(repo):", "file is too funky for utf-8 msg = \"UnicodeDecode Error\" logging.error(msg) return False", "repo[\"commit_header\"] = commit_header_msg.format(**repo) repo[\"commit_text\"] = commit_text_msg.format(**repo) cmd = 'git commit -a -m \"{commit_header}\"", "is_branch_different_from_default(repo): logging.info(\"No edits have been made, skipping!\".format(**repo)) return False logging.info(\"Creating pull request for", "os.environ[shell_token] login_params = {\"access_token\":GITHUB_TOKEN,} API_URL = \"https://api.github.com/repos\" fork_url = API_URL + \"/{user_name}/{repo_name}/forks\" pulls_url", "as FOUT: FOUT.write(''.join(newlines)) return corrections @contextmanager def enter_repo(repo): # Remember our original directory", "request for {full_name}\".format(**repo)) data = { \"head\" :\"{bot_name}:{branch_name}\".format(**repo), \"base\" : repo[\"master_branch\"], \"title\" :", "will be forty characters long return len(p.strip()) == 40 def create_branch(repo): # Attempts", "as FIN: commit_header_msg = FIN.read().strip() with open(\"messages/commit_text.txt\") as FIN: commit_text_msg = FIN.read().strip() def", "push_commits(repo) # Create pull request pull_status = pull_request_repo(repo) return pull_status ############################################################### if __name__", "been made this will return True return p def pull_request_repo(repo): if not is_branch_different_from_default(repo):", "def delete_bot_repo(repo): url = API_URL + \"/{bot_name}/{repo_name}\".format(**repo) r = requests.delete(url,params=login_params) msg = \"Deleted", "x.lower()] repo[\"branch_name\"] = \"spell_check/{}\".format(good_word) create_branch(repo) # Fix READMES total_corrections = 0 for fr", "subprocess.check_output(cmd,shell=True).strip() # If any edits have been made this will return True return", "{}\".format(r)) return True def fork_repo(repo): f_url = fork_url.format(**repo) r = requests.post(f_url,params=login_params) status =", "\"/{bot_name}/{repo_name}\".format(**repo) r = requests.delete(url,params=login_params) msg = \"Deleted bot repo {repo_name}, status {}\" logging.info(msg.format(r.status_code,**repo))", "logging.info(msg) subprocess.check_output(cmd,shell=True) except: msg = u\"Cloning repo {full_name} again after sleep\".format(**repo) logging.info(msg) time.sleep(clone_error_sleep_time)", "fix_file(fr, bad_word, good_word) except UnicodeDecodeError: # Skip the repo if the file is", "repo_name, \"good_word\" : good_word, \"bad_word\" : bad_word, } # Check if the user_name", "an env variable and load it shell_token = \"GITHUB_ORTHOGRAPHIC_TOKEN\" GITHUB_TOKEN = os.environ[shell_token] login_params", "Set the username cmd = 'git config user.name \"{bot_name}\"'.format(**repo) os.system(cmd) cmd = 'git", "pull_request_msg.format(**repo), } url = pulls_url.format(**repo) r = requests.post(url,params=login_params,json=data) if \"errors\" in r.json(): from", "pull_request_msg = ' '.join(FIN.read().split()) with open(\"messages/commit_header.txt\") as FIN: commit_header_msg = FIN.read().strip() with open(\"messages/commit_text.txt\")", "msg = \"Deleted bot repo {repo_name}, status {}\" logging.info(msg.format(r.status_code,**repo)) def fix_word(line,w1,w2): line =", "pull_request_repo(repo): if not is_branch_different_from_default(repo): logging.info(\"No edits have been made, skipping!\".format(**repo)) return False logging.info(\"Creating", "= \"https://api.github.com/repos\" fork_url = API_URL + 
\"/{user_name}/{repo_name}/forks\" pulls_url = API_URL + \"/{user_name}/{repo_name}/pulls\" delete_url", "os.getcwd() repo[\"bot_name\"] = \"orthographic-pedant\" repo[\"bot_password\"] = <PASSWORD> # Used so github can track", "return pull_status ############################################################### if __name__ == \"__main__\": # Target word bad_word = \"Celcius\"", "\"head\" :\"{bot_name}:{branch_name}\".format(**repo), \"base\" : repo[\"master_branch\"], \"title\" : repo[\"commit_header\"], \"body\" : pull_request_msg.format(**repo), } url", "repo[\"bot_password\"] = <PASSWORD> # Used so github can track the submissions... repo[\"bot_email\"] =", "correction_count = fix_file(fr, bad_word, good_word) except UnicodeDecodeError: # Skip the repo if the", "commit_header_msg = FIN.read().strip() with open(\"messages/commit_text.txt\") as FIN: commit_text_msg = FIN.read().strip() def is_branch_different_from_default(repo): #", "repo {full_name} again after sleep\".format(**repo) logging.info(msg) time.sleep(clone_error_sleep_time) subprocess.check_output(cmd,shell=True) os.system(cmd) def does_git_branch_exist(repo): # Checks", "\"https://{bot_name}:{bot_password}@github.com/{bot_name}/{repo_name} {branch_name}:{branch_name}\" clone_url = \"https://github.com/orthographic-pedant/{repo_name}\" # Load the PR text with open(\"messages/pull_request.txt\") as", "status {}\" logging.info(msg.format(r.status_code,**repo)) def fix_word(line,w1,w2): line = line.replace(w1.title(),w2.title()) line = line.replace(w1,w2) line =", "\"bad_word\" : bad_word, } # Check if the user_name is a \"bad_word\", this", "def fix_file(f, w1, w2): corrections = 0 newlines = [] with codecs.open(f,'r','utf-8') as", "<PASSWORD> # Used so github can track the submissions... repo[\"bot_email\"] = \"travis.hoppe\"+\"+orthographicpendant\"+\"@\"+\"<EMAIL>\" #", "corrections @contextmanager def enter_repo(repo): # Remember our original directory org_dir = os.getcwd() repo[\"bot_name\"]", "in line.lower(): logging.info(\"Fixing {}->{} in {}\".format(w1,w2,f)) line = fix_word(line,w1,w2) corrections += 1 newlines.append(line)", "directories os.system(\"mkdir -p forks\") os.chdir(\"forks\") clone_repo(repo) # Enter the repo directory os.chdir(repo[\"repo_name\"]) #", "# Skip the repo if the file is too funky for utf-8 msg", "{ \"access_token\" : GITHUB_TOKEN, \"user_name\" : user_name, \"repo_name\" : repo_name, \"good_word\" : good_word,", "if bad_word.lower() in repo_name.lower(): return False with enter_repo(repo): # Find READMES F_README =", "status {}\".format(r)) return True def fork_repo(repo): f_url = fork_url.format(**repo) r = requests.post(f_url,params=login_params) status", "if w1.lower() in line.lower(): logging.info(\"Fixing {}->{} in {}\".format(w1,w2,f)) line = fix_word(line,w1,w2) corrections +=", "Attempts to create the branch in repo[\"branch_name\"] if not does_git_branch_exist(repo): logging.info(\"Creating new branch", "can track the submissions... 
repo[\"bot_email\"] = \"travis.hoppe\"+\"+orthographicpendant\"+\"@\"+\"<EMAIL>\" # Record the full name of", "os.chdir(repo[\"repo_name\"]) # Get the current branch name p = subprocess.check_output(\"git show-branch\",shell=True) repo[\"master_branch\"] =", "repo[\"commit_text\"] = commit_text_msg.format(**repo) cmd = 'git commit -a -m \"{commit_header}\" -m \"{commit_text}\"'.format(**repo) os.system(cmd)", ": repo_name, \"good_word\" : good_word, \"bad_word\" : bad_word, } # Check if the", "requests logging.basicConfig(level=logging.INFO) logging.getLogger(\"requests\").setLevel(logging.WARNING) FLAG_fork = True FLAG_delete = True fork_sleep_time = 10 clone_error_sleep_time", "Find READMES F_README = [x for x in glob.glob(\"*.*\") if 'readme.' in x.lower()]", "line.replace(w1.title(),w2.title()) line = line.replace(w1,w2) line = line.replace(w1.lower(),w2.lower()) line = line.replace(w1.upper(),w2.upper()) return line def", "fix_file(f, w1, w2): corrections = 0 newlines = [] with codecs.open(f,'r','utf-8') as FIN:", "logging.info(\"Sleeping for {} seconds.\".format(fork_sleep_time)) time.sleep(fork_sleep_time) def push_commits(repo): logging.info(\"Push new branch {bot_name}:{branch_name}\".format(**repo)) cmd =", "\"https://github.com/orthographic-pedant/{repo_name}\" # Load the PR text with open(\"messages/pull_request.txt\") as FIN: pull_request_msg = '", "pull_request_repo(repo) return pull_status ############################################################### if __name__ == \"__main__\": # Target word bad_word =", "it shell_token = \"GITHUB_ORTHOGRAPHIC_TOKEN\" GITHUB_TOKEN = os.environ[shell_token] login_params = {\"access_token\":GITHUB_TOKEN,} API_URL = \"https://api.github.com/repos\"", "= FIN.read().strip() with open(\"messages/commit_text.txt\") as FIN: commit_text_msg = FIN.read().strip() def is_branch_different_from_default(repo): # Checks", "pprint print pprint(r.json()[\"errors\"]) logging.info(\"Pull request status {}\".format(r)) return True def fork_repo(repo): f_url =", "\"UnicodeDecode Error\" logging.error(msg) return False total_corrections += correction_count logging.info(\"Fixed {} spelling mistakes\".format(total_corrections)) #", "{repo_name}, status {}\" logging.info(msg.format(r.status_code,**repo)) def fix_word(line,w1,w2): line = line.replace(w1.title(),w2.title()) line = line.replace(w1,w2) line", "-b {branch_name}\".format(**repo) os.system(cmd) def delete_bot_repo(repo): url = API_URL + \"/{bot_name}/{repo_name}\".format(**repo) r = requests.delete(url,params=login_params)", "-m \"{commit_header}\" -m \"{commit_text}\"'.format(**repo) os.system(cmd) # Push the changes to bot directory push_commits(repo)", "token set as an env variable and load it shell_token = \"GITHUB_ORTHOGRAPHIC_TOKEN\" GITHUB_TOKEN", "too funky for utf-8 msg = \"UnicodeDecode Error\" logging.error(msg) return False total_corrections +=", "def fix_repo(full_name, good_word, bad_word): full_name = full_name.strip() user_name, repo_name = full_name.split('/') repo =", "pull request for {full_name}\".format(**repo)) data = { \"head\" :\"{bot_name}:{branch_name}\".format(**repo), \"base\" : repo[\"master_branch\"], \"title\"", "r = requests.delete(url,params=login_params) msg = \"Deleted bot repo {repo_name}, status {}\" logging.info(msg.format(r.status_code,**repo)) def", "= \"orthographic-pedant\" repo[\"bot_password\"] = <PASSWORD> # Used so github can track the submissions...", "= [] with codecs.open(f,'r','utf-8') as FIN: for line in FIN: if w1.lower() in", "+ git_endpoint if not 
os.path.exists(repo[\"repo_name\"]): try: msg = u\"Cloning repo {full_name}\".format(**repo) logging.info(msg) subprocess.check_output(cmd,shell=True)", "there is a token set as an env variable and load it shell_token", "in glob.glob(\"*.*\") if 'readme.' in x.lower()] repo[\"branch_name\"] = \"spell_check/{}\".format(good_word) create_branch(repo) # Fix READMES", "= \"GITHUB_ORTHOGRAPHIC_TOKEN\" GITHUB_TOKEN = os.environ[shell_token] login_params = {\"access_token\":GITHUB_TOKEN,} API_URL = \"https://api.github.com/repos\" fork_url =", "return False # Check if repo_name is a \"bad_word\", this is also a", "= fix_file(fr, bad_word, good_word) except UnicodeDecodeError: # Skip the repo if the file", "utf-8 msg = \"UnicodeDecode Error\" logging.error(msg) return False total_corrections += correction_count logging.info(\"Fixed {}", "is a false positive! if bad_word.lower() in user_name.lower(): return False # Check if", "r = requests.post(url,params=login_params,json=data) if \"errors\" in r.json(): from pprint import pprint print pprint(r.json()[\"errors\"])", "with open(\"messages/commit_text.txt\") as FIN: commit_text_msg = FIN.read().strip() def is_branch_different_from_default(repo): # Checks if any", "repo = { \"access_token\" : GITHUB_TOKEN, \"user_name\" : user_name, \"repo_name\" : repo_name, \"good_word\"", "line.replace(w1.lower(),w2.lower()) line = line.replace(w1.upper(),w2.upper()) return line def fix_file(f, w1, w2): corrections = 0", "w1.lower() in line.lower(): logging.info(\"Fixing {}->{} in {}\".format(w1,w2,f)) line = fix_word(line,w1,w2) corrections += 1", "logging, glob, codecs, os, time, subprocess from contextlib import contextmanager import requests logging.basicConfig(level=logging.INFO)", ": GITHUB_TOKEN, \"user_name\" : user_name, \"repo_name\" : repo_name, \"good_word\" : good_word, \"bad_word\" :", "with open(\"messages/commit_header.txt\") as FIN: commit_header_msg = FIN.read().strip() with open(\"messages/commit_text.txt\") as FIN: commit_text_msg =", "############################################################### if __name__ == \"__main__\": # Target word bad_word = \"Celcius\" good_word =", "any substantial commits have been made cmd = \"git diff {master_branch} --\".format(**repo) p", "the PR text with open(\"messages/pull_request.txt\") as FIN: pull_request_msg = ' '.join(FIN.read().split()) with open(\"messages/commit_header.txt\")", "\"title\" : repo[\"commit_header\"], \"body\" : pull_request_msg.format(**repo), } url = pulls_url.format(**repo) r = requests.post(url,params=login_params,json=data)", "is too funky for utf-8 msg = \"UnicodeDecode Error\" logging.error(msg) return False total_corrections", "{} seconds.\".format(fork_sleep_time)) time.sleep(fork_sleep_time) def push_commits(repo): logging.info(\"Push new branch {bot_name}:{branch_name}\".format(**repo)) cmd = \"git push", "40 def create_branch(repo): # Attempts to create the branch in repo[\"branch_name\"] if not", "def clone_repo(repo): git_endpoint = clone_url.format(**repo) cmd = \"git clone -q --single-branch --depth 1", "except subprocess.CalledProcessError: return False # Valid SHA1 hash will be forty characters long", "# Remember our original directory org_dir = os.getcwd() repo[\"bot_name\"] = \"orthographic-pedant\" repo[\"bot_password\"] =", "good_word, \"bad_word\" : bad_word, } # Check if the user_name is a \"bad_word\",", "exists of a given name cmd = \"git rev-parse -q --verify {branch_name}\".format(**repo) try:", "pprint import pprint print pprint(r.json()[\"errors\"]) 
logging.info(\"Pull request status {}\".format(r)) return True def fork_repo(repo):", "API_URL + \"/{user_name}/{repo_name}/pulls\" delete_url = API_URL + \"/{user_name}/{repo_name}\" push_url = \"https://{bot_name}:{bot_password}@github.com/{bot_name}/{repo_name} {branch_name}:{branch_name}\" clone_url", "load it shell_token = \"GITHUB_ORTHOGRAPHIC_TOKEN\" GITHUB_TOKEN = os.environ[shell_token] login_params = {\"access_token\":GITHUB_TOKEN,} API_URL =", "bot directory push_commits(repo) # Create pull request pull_status = pull_request_repo(repo) return pull_status ###############################################################", "word bad_word = \"Celcius\" good_word = \"Celsius\" full_name = \"thoppe/I-am-error\" fix_repo(full_name, good_word, bad_word)", "in {}\".format(w1,w2,f)) line = fix_word(line,w1,w2) corrections += 1 newlines.append(line) with codecs.open(f,'w','utf-8') as FOUT:", "--single-branch --depth 1 \" + git_endpoint if not os.path.exists(repo[\"repo_name\"]): try: msg = u\"Cloning", "total_corrections = 0 for fr in F_README: try: correction_count = fix_file(fr, bad_word, good_word)", "\"{bot_email}\"'.format(**repo) os.system(cmd) yield logging.info(\"Exiting {}\".format(repo[\"full_name\"])) if FLAG_delete: delete_bot_repo(repo) os.chdir(org_dir) os.system(\"rm -rf forks\") def", "repo[\"master_branch\"] = p.split(']')[0].split('[')[1] # Set the username cmd = 'git config user.name \"{bot_name}\"'.format(**repo)", "branch {branch_name}\".format(**repo)) cmd = \"git checkout -b {branch_name}\".format(**repo) os.system(cmd) def delete_bot_repo(repo): url =", "fix_word(line,w1,w2) corrections += 1 newlines.append(line) with codecs.open(f,'w','utf-8') as FOUT: FOUT.write(''.join(newlines)) return corrections @contextmanager", "\"{user_name}:{repo_name}\".format(**repo) logging.info(\"Entered {}\".format(repo[\"full_name\"])) if FLAG_fork: fork_repo(repo) # Create the directories os.system(\"mkdir -p forks\")", "directory os.chdir(repo[\"repo_name\"]) # Get the current branch name p = subprocess.check_output(\"git show-branch\",shell=True) repo[\"master_branch\"]", "full_name = full_name.strip() user_name, repo_name = full_name.split('/') repo = { \"access_token\" : GITHUB_TOKEN,", "the user_name is a \"bad_word\", this is a false positive! if bad_word.lower() in", "= 10 clone_error_sleep_time = 60 # Verify that there is a token set", "is a \"bad_word\", this is a false positive! if bad_word.lower() in user_name.lower(): return", "logging.info(\"Fixing {}->{} in {}\".format(w1,w2,f)) line = fix_word(line,w1,w2) corrections += 1 newlines.append(line) with codecs.open(f,'w','utf-8')", "w1, w2): corrections = 0 newlines = [] with codecs.open(f,'r','utf-8') as FIN: for", "requests.post(url,params=login_params,json=data) if \"errors\" in r.json(): from pprint import pprint print pprint(r.json()[\"errors\"]) logging.info(\"Pull request", "clone -q --single-branch --depth 1 \" + git_endpoint if not os.path.exists(repo[\"repo_name\"]): try: msg", "<reponame>thoppe/orthographic-pedant<gh_stars>100-1000 import os, json, logging, glob, codecs, os, time, subprocess from contextlib import", "logging.info(msg.format(r.status_code,**repo)) def fix_word(line,w1,w2): line = line.replace(w1.title(),w2.title()) line = line.replace(w1,w2) line = line.replace(w1.lower(),w2.lower()) line", "\"orthographic-pedant\" repo[\"bot_password\"] = <PASSWORD> # Used so github can track the submissions... 
repo[\"bot_email\"]", "will return True return p def pull_request_repo(repo): if not is_branch_different_from_default(repo): logging.info(\"No edits have", "= \"travis.hoppe\"+\"+orthographicpendant\"+\"@\"+\"<EMAIL>\" # Record the full name of the repo repo[\"full_name\"] = \"{user_name}:{repo_name}\".format(**repo)", "funky for utf-8 msg = \"UnicodeDecode Error\" logging.error(msg) return False total_corrections += correction_count", "skipping!\".format(**repo)) return False logging.info(\"Creating pull request for {full_name}\".format(**repo)) data = { \"head\" :\"{bot_name}:{branch_name}\".format(**repo),", "= \"git checkout -b {branch_name}\".format(**repo) os.system(cmd) def delete_bot_repo(repo): url = API_URL + \"/{bot_name}/{repo_name}\".format(**repo)", "pulls_url.format(**repo) r = requests.post(url,params=login_params,json=data) if \"errors\" in r.json(): from pprint import pprint print", "commits have been made cmd = \"git diff {master_branch} --\".format(**repo) p = subprocess.check_output(cmd,shell=True).strip()", "= { \"head\" :\"{bot_name}:{branch_name}\".format(**repo), \"base\" : repo[\"master_branch\"], \"title\" : repo[\"commit_header\"], \"body\" : pull_request_msg.format(**repo),", "\"git checkout -b {branch_name}\".format(**repo) os.system(cmd) def delete_bot_repo(repo): url = API_URL + \"/{bot_name}/{repo_name}\".format(**repo) r", "# Find READMES F_README = [x for x in glob.glob(\"*.*\") if 'readme.' in", "push_url = \"https://{bot_name}:{bot_password}@github.com/{bot_name}/{repo_name} {branch_name}:{branch_name}\" clone_url = \"https://github.com/orthographic-pedant/{repo_name}\" # Load the PR text with", "delete_bot_repo(repo): url = API_URL + \"/{bot_name}/{repo_name}\".format(**repo) r = requests.delete(url,params=login_params) msg = \"Deleted bot", "for {} seconds.\".format(fork_sleep_time)) time.sleep(fork_sleep_time) def push_commits(repo): logging.info(\"Push new branch {bot_name}:{branch_name}\".format(**repo)) cmd = \"git", "'git config user.email \"{bot_email}\"'.format(**repo) os.system(cmd) yield logging.info(\"Exiting {}\".format(repo[\"full_name\"])) if FLAG_delete: delete_bot_repo(repo) os.chdir(org_dir) os.system(\"rm", "fork_url.format(**repo) r = requests.post(f_url,params=login_params) status = r.status_code logging.info(\"Creating fork, status {}\".format(status)) assert(status ==", "directory push_commits(repo) # Create pull request pull_status = pull_request_repo(repo) return pull_status ############################################################### if", "# Create pull request pull_status = pull_request_repo(repo) return pull_status ############################################################### if __name__ ==", "line def fix_file(f, w1, w2): corrections = 0 newlines = [] with codecs.open(f,'r','utf-8')", "Enter the repo directory os.chdir(repo[\"repo_name\"]) # Get the current branch name p =", "to create the branch in repo[\"branch_name\"] if not does_git_branch_exist(repo): logging.info(\"Creating new branch {branch_name}\".format(**repo))", "import requests logging.basicConfig(level=logging.INFO) logging.getLogger(\"requests\").setLevel(logging.WARNING) FLAG_fork = True FLAG_delete = True fork_sleep_time = 10", "that there is a token set as an env variable and load it", "the username cmd = 'git config user.name \"{bot_name}\"'.format(**repo) os.system(cmd) cmd = 'git config", "return False with enter_repo(repo): # Find READMES F_README = [x for x in", "fork_url = API_URL + \"/{user_name}/{repo_name}/forks\" pulls_url = API_URL + 
\"/{user_name}/{repo_name}/pulls\" delete_url = API_URL", "if FLAG_delete: delete_bot_repo(repo) os.chdir(org_dir) os.system(\"rm -rf forks\") def fix_repo(full_name, good_word, bad_word): full_name =", "status = r.status_code logging.info(\"Creating fork, status {}\".format(status)) assert(status == 202) logging.info(\"Sleeping for {}", "the repo directory os.chdir(repo[\"repo_name\"]) # Get the current branch name p = subprocess.check_output(\"git", "any edits have been made this will return True return p def pull_request_repo(repo):", "return True return p def pull_request_repo(repo): if not is_branch_different_from_default(repo): logging.info(\"No edits have been", "Checks if any substantial commits have been made cmd = \"git diff {master_branch}", "msg = u\"Cloning repo {full_name} again after sleep\".format(**repo) logging.info(msg) time.sleep(clone_error_sleep_time) subprocess.check_output(cmd,shell=True) os.system(cmd) def", "os.system(cmd) def does_git_branch_exist(repo): # Checks if a branch already exists of a given", "= \"git rev-parse -q --verify {branch_name}\".format(**repo) try: p = subprocess.check_output(cmd,shell=True).strip() except subprocess.CalledProcessError: return", "user_name, repo_name = full_name.split('/') repo = { \"access_token\" : GITHUB_TOKEN, \"user_name\" : user_name,", "FLAG_delete: delete_bot_repo(repo) os.chdir(org_dir) os.system(\"rm -rf forks\") def fix_repo(full_name, good_word, bad_word): full_name = full_name.strip()", ":\"{bot_name}:{branch_name}\".format(**repo), \"base\" : repo[\"master_branch\"], \"title\" : repo[\"commit_header\"], \"body\" : pull_request_msg.format(**repo), } url =", "\"/{user_name}/{repo_name}/pulls\" delete_url = API_URL + \"/{user_name}/{repo_name}\" push_url = \"https://{bot_name}:{bot_password}@github.com/{bot_name}/{repo_name} {branch_name}:{branch_name}\" clone_url = \"https://github.com/orthographic-pedant/{repo_name}\"", "repo[\"branch_name\"] if not does_git_branch_exist(repo): logging.info(\"Creating new branch {branch_name}\".format(**repo)) cmd = \"git checkout -b", "# Push the changes to bot directory push_commits(repo) # Create pull request pull_status", "# If any edits have been made this will return True return p", "codecs, os, time, subprocess from contextlib import contextmanager import requests logging.basicConfig(level=logging.INFO) logging.getLogger(\"requests\").setLevel(logging.WARNING) FLAG_fork", "codecs.open(f,'r','utf-8') as FIN: for line in FIN: if w1.lower() in line.lower(): logging.info(\"Fixing {}->{}", "a \"bad_word\", this is a false positive! 
if bad_word.lower() in user_name.lower(): return False", "-m \"{commit_text}\"'.format(**repo) os.system(cmd) # Push the changes to bot directory push_commits(repo) # Create", "def does_git_branch_exist(repo): # Checks if a branch already exists of a given name", "been made, skipping!\".format(**repo)) return False logging.info(\"Creating pull request for {full_name}\".format(**repo)) data = {", "pull request pull_status = pull_request_repo(repo) return pull_status ############################################################### if __name__ == \"__main__\": #", "+ \"/{user_name}/{repo_name}\" push_url = \"https://{bot_name}:{bot_password}@github.com/{bot_name}/{repo_name} {branch_name}:{branch_name}\" clone_url = \"https://github.com/orthographic-pedant/{repo_name}\" # Load the PR", "substantial commits have been made cmd = \"git diff {master_branch} --\".format(**repo) p =", "newlines.append(line) with codecs.open(f,'w','utf-8') as FOUT: FOUT.write(''.join(newlines)) return corrections @contextmanager def enter_repo(repo): # Remember", "GITHUB_TOKEN, \"user_name\" : user_name, \"repo_name\" : repo_name, \"good_word\" : good_word, \"bad_word\" : bad_word,", "of a given name cmd = \"git rev-parse -q --verify {branch_name}\".format(**repo) try: p", "if the file is too funky for utf-8 msg = \"UnicodeDecode Error\" logging.error(msg)", "Target word bad_word = \"Celcius\" good_word = \"Celsius\" full_name = \"thoppe/I-am-error\" fix_repo(full_name, good_word,", "\"git diff {master_branch} --\".format(**repo) p = subprocess.check_output(cmd,shell=True).strip() # If any edits have been", "changes repo[\"commit_header\"] = commit_header_msg.format(**repo) repo[\"commit_text\"] = commit_text_msg.format(**repo) cmd = 'git commit -a -m", "Verify that there is a token set as an env variable and load", "{}\".format(w1,w2,f)) line = fix_word(line,w1,w2) corrections += 1 newlines.append(line) with codecs.open(f,'w','utf-8') as FOUT: FOUT.write(''.join(newlines))", "= subprocess.check_output(cmd,shell=True).strip() # If any edits have been made this will return True", "the full name of the repo repo[\"full_name\"] = \"{user_name}:{repo_name}\".format(**repo) logging.info(\"Entered {}\".format(repo[\"full_name\"])) if FLAG_fork:", "logging.info(\"No edits have been made, skipping!\".format(**repo)) return False logging.info(\"Creating pull request for {full_name}\".format(**repo))", "return False total_corrections += correction_count logging.info(\"Fixed {} spelling mistakes\".format(total_corrections)) # Commit changes repo[\"commit_header\"]", ": repo[\"master_branch\"], \"title\" : repo[\"commit_header\"], \"body\" : pull_request_msg.format(**repo), } url = pulls_url.format(**repo) r", "= API_URL + \"/{user_name}/{repo_name}/forks\" pulls_url = API_URL + \"/{user_name}/{repo_name}/pulls\" delete_url = API_URL +", "open(\"messages/commit_header.txt\") as FIN: commit_header_msg = FIN.read().strip() with open(\"messages/commit_text.txt\") as FIN: commit_text_msg = FIN.read().strip()", "if repo_name is a \"bad_word\", this is also a false positive! 
if bad_word.lower()", "os.system(cmd) yield logging.info(\"Exiting {}\".format(repo[\"full_name\"])) if FLAG_delete: delete_bot_repo(repo) os.chdir(org_dir) os.system(\"rm -rf forks\") def fix_repo(full_name,", "= line.replace(w1,w2) line = line.replace(w1.lower(),w2.lower()) line = line.replace(w1.upper(),w2.upper()) return line def fix_file(f, w1,", "url = pulls_url.format(**repo) r = requests.post(url,params=login_params,json=data) if \"errors\" in r.json(): from pprint import", "Used so github can track the submissions... repo[\"bot_email\"] = \"travis.hoppe\"+\"+orthographicpendant\"+\"@\"+\"<EMAIL>\" # Record the", "create_branch(repo): # Attempts to create the branch in repo[\"branch_name\"] if not does_git_branch_exist(repo): logging.info(\"Creating", "create_branch(repo) # Fix READMES total_corrections = 0 for fr in F_README: try: correction_count", "yield logging.info(\"Exiting {}\".format(repo[\"full_name\"])) if FLAG_delete: delete_bot_repo(repo) os.chdir(org_dir) os.system(\"rm -rf forks\") def fix_repo(full_name, good_word,", "-q --verify {branch_name}\".format(**repo) try: p = subprocess.check_output(cmd,shell=True).strip() except subprocess.CalledProcessError: return False # Valid", "is_branch_different_from_default(repo): # Checks if any substantial commits have been made cmd = \"git", "= {\"access_token\":GITHUB_TOKEN,} API_URL = \"https://api.github.com/repos\" fork_url = API_URL + \"/{user_name}/{repo_name}/forks\" pulls_url = API_URL", "line.replace(w1,w2) line = line.replace(w1.lower(),w2.lower()) line = line.replace(w1.upper(),w2.upper()) return line def fix_file(f, w1, w2):", "os.chdir(\"forks\") clone_repo(repo) # Enter the repo directory os.chdir(repo[\"repo_name\"]) # Get the current branch", "edits have been made this will return True return p def pull_request_repo(repo): if", "logging.info(\"Pull request status {}\".format(r)) return True def fork_repo(repo): f_url = fork_url.format(**repo) r =", "line in FIN: if w1.lower() in line.lower(): logging.info(\"Fixing {}->{} in {}\".format(w1,w2,f)) line =", "user.name \"{bot_name}\"'.format(**repo) os.system(cmd) cmd = 'git config user.email \"{bot_email}\"'.format(**repo) os.system(cmd) yield logging.info(\"Exiting {}\".format(repo[\"full_name\"]))", "clone_error_sleep_time = 60 # Verify that there is a token set as an", "F_README = [x for x in glob.glob(\"*.*\") if 'readme.' in x.lower()] repo[\"branch_name\"] =", "\" + git_endpoint if not os.path.exists(repo[\"repo_name\"]): try: msg = u\"Cloning repo {full_name}\".format(**repo) logging.info(msg)", "= 0 for fr in F_README: try: correction_count = fix_file(fr, bad_word, good_word) except", "Error\" logging.error(msg) return False total_corrections += correction_count logging.info(\"Fixed {} spelling mistakes\".format(total_corrections)) # Commit", "= API_URL + \"/{user_name}/{repo_name}/pulls\" delete_url = API_URL + \"/{user_name}/{repo_name}\" push_url = \"https://{bot_name}:{bot_password}@github.com/{bot_name}/{repo_name} {branch_name}:{branch_name}\"", "return line def fix_file(f, w1, w2): corrections = 0 newlines = [] with", "r = requests.post(f_url,params=login_params) status = r.status_code logging.info(\"Creating fork, status {}\".format(status)) assert(status == 202)", "= ' '.join(FIN.read().split()) with open(\"messages/commit_header.txt\") as FIN: commit_header_msg = FIN.read().strip() with open(\"messages/commit_text.txt\") as", "github can track the submissions... 
repo[\"bot_email\"] = \"travis.hoppe\"+\"+orthographicpendant\"+\"@\"+\"<EMAIL>\" # Record the full name", "logging.info(\"Creating fork, status {}\".format(status)) assert(status == 202) logging.info(\"Sleeping for {} seconds.\".format(fork_sleep_time)) time.sleep(fork_sleep_time) def", "line = line.replace(w1.lower(),w2.lower()) line = line.replace(w1.upper(),w2.upper()) return line def fix_file(f, w1, w2): corrections", "fork_repo(repo) # Create the directories os.system(\"mkdir -p forks\") os.chdir(\"forks\") clone_repo(repo) # Enter the", "false positive! if bad_word.lower() in user_name.lower(): return False # Check if repo_name is", "= subprocess.check_output(\"git show-branch\",shell=True) repo[\"master_branch\"] = p.split(']')[0].split('[')[1] # Set the username cmd = 'git", "= \"git clone -q --single-branch --depth 1 \" + git_endpoint if not os.path.exists(repo[\"repo_name\"]):", "= 'git commit -a -m \"{commit_header}\" -m \"{commit_text}\"'.format(**repo) os.system(cmd) # Push the changes", "contextmanager import requests logging.basicConfig(level=logging.INFO) logging.getLogger(\"requests\").setLevel(logging.WARNING) FLAG_fork = True FLAG_delete = True fork_sleep_time =", "mistakes\".format(total_corrections)) # Commit changes repo[\"commit_header\"] = commit_header_msg.format(**repo) repo[\"commit_text\"] = commit_text_msg.format(**repo) cmd = 'git", "corrections = 0 newlines = [] with codecs.open(f,'r','utf-8') as FIN: for line in", "msg = \"UnicodeDecode Error\" logging.error(msg) return False total_corrections += correction_count logging.info(\"Fixed {} spelling", "user_name.lower(): return False # Check if repo_name is a \"bad_word\", this is also", "= p.split(']')[0].split('[')[1] # Set the username cmd = 'git config user.name \"{bot_name}\"'.format(**repo) os.system(cmd)", "cmd = 'git config user.name \"{bot_name}\"'.format(**repo) os.system(cmd) cmd = 'git config user.email \"{bot_email}\"'.format(**repo)", "if not does_git_branch_exist(repo): logging.info(\"Creating new branch {branch_name}\".format(**repo)) cmd = \"git checkout -b {branch_name}\".format(**repo)", "requests.post(f_url,params=login_params) status = r.status_code logging.info(\"Creating fork, status {}\".format(status)) assert(status == 202) logging.info(\"Sleeping for", "return False # Valid SHA1 hash will be forty characters long return len(p.strip())", "the directories os.system(\"mkdir -p forks\") os.chdir(\"forks\") clone_repo(repo) # Enter the repo directory os.chdir(repo[\"repo_name\"])", "\"{commit_text}\"'.format(**repo) os.system(cmd) # Push the changes to bot directory push_commits(repo) # Create pull", "def create_branch(repo): # Attempts to create the branch in repo[\"branch_name\"] if not does_git_branch_exist(repo):", "= pull_request_repo(repo) return pull_status ############################################################### if __name__ == \"__main__\": # Target word bad_word", "p = subprocess.check_output(\"git show-branch\",shell=True) repo[\"master_branch\"] = p.split(']')[0].split('[')[1] # Set the username cmd =", "submissions... repo[\"bot_email\"] = \"travis.hoppe\"+\"+orthographicpendant\"+\"@\"+\"<EMAIL>\" # Record the full name of the repo repo[\"full_name\"]", "os.system(\"rm -rf forks\") def fix_repo(full_name, good_word, bad_word): full_name = full_name.strip() user_name, repo_name =", "= \"git diff {master_branch} --\".format(**repo) p = subprocess.check_output(cmd,shell=True).strip() # If any edits have", "a false positive! 
if bad_word.lower() in repo_name.lower(): return False with enter_repo(repo): # Find", "commit -a -m \"{commit_header}\" -m \"{commit_text}\"'.format(**repo) os.system(cmd) # Push the changes to bot", "API_URL + \"/{bot_name}/{repo_name}\".format(**repo) r = requests.delete(url,params=login_params) msg = \"Deleted bot repo {repo_name}, status", "import os, json, logging, glob, codecs, os, time, subprocess from contextlib import contextmanager", "Valid SHA1 hash will be forty characters long return len(p.strip()) == 40 def", "= 'git config user.email \"{bot_email}\"'.format(**repo) os.system(cmd) yield logging.info(\"Exiting {}\".format(repo[\"full_name\"])) if FLAG_delete: delete_bot_repo(repo) os.chdir(org_dir)", "os.chdir(org_dir) os.system(\"rm -rf forks\") def fix_repo(full_name, good_word, bad_word): full_name = full_name.strip() user_name, repo_name", "= \"spell_check/{}\".format(good_word) create_branch(repo) # Fix READMES total_corrections = 0 for fr in F_README:", "bad_word): full_name = full_name.strip() user_name, repo_name = full_name.split('/') repo = { \"access_token\" :", "= commit_text_msg.format(**repo) cmd = 'git commit -a -m \"{commit_header}\" -m \"{commit_text}\"'.format(**repo) os.system(cmd) #", "after sleep\".format(**repo) logging.info(msg) time.sleep(clone_error_sleep_time) subprocess.check_output(cmd,shell=True) os.system(cmd) def does_git_branch_exist(repo): # Checks if a branch", "from pprint import pprint print pprint(r.json()[\"errors\"]) logging.info(\"Pull request status {}\".format(r)) return True def", "{full_name}\".format(**repo)) data = { \"head\" :\"{bot_name}:{branch_name}\".format(**repo), \"base\" : repo[\"master_branch\"], \"title\" : repo[\"commit_header\"], \"body\"", "repo_name is a \"bad_word\", this is also a false positive! if bad_word.lower() in", "\"https://api.github.com/repos\" fork_url = API_URL + \"/{user_name}/{repo_name}/forks\" pulls_url = API_URL + \"/{user_name}/{repo_name}/pulls\" delete_url =", "= \"https://github.com/orthographic-pedant/{repo_name}\" # Load the PR text with open(\"messages/pull_request.txt\") as FIN: pull_request_msg =", "{branch_name}\".format(**repo) try: p = subprocess.check_output(cmd,shell=True).strip() except subprocess.CalledProcessError: return False # Valid SHA1 hash", "os.system(cmd) # Push the changes to bot directory push_commits(repo) # Create pull request", "enter_repo(repo): # Find READMES F_README = [x for x in glob.glob(\"*.*\") if 'readme.'", "correction_count logging.info(\"Fixed {} spelling mistakes\".format(total_corrections)) # Commit changes repo[\"commit_header\"] = commit_header_msg.format(**repo) repo[\"commit_text\"] =", "cmd = \"git push -u \" + push_url.format(**repo) os.system(cmd) def clone_repo(repo): git_endpoint =", "fork_repo(repo): f_url = fork_url.format(**repo) r = requests.post(f_url,params=login_params) status = r.status_code logging.info(\"Creating fork, status", "name cmd = \"git rev-parse -q --verify {branch_name}\".format(**repo) try: p = subprocess.check_output(cmd,shell=True).strip() except", "glob.glob(\"*.*\") if 'readme.' in x.lower()] repo[\"branch_name\"] = \"spell_check/{}\".format(good_word) create_branch(repo) # Fix READMES total_corrections", "subprocess from contextlib import contextmanager import requests logging.basicConfig(level=logging.INFO) logging.getLogger(\"requests\").setLevel(logging.WARNING) FLAG_fork = True FLAG_delete", "long return len(p.strip()) == 40 def create_branch(repo): # Attempts to create the branch", "false positive! 
if bad_word.lower() in repo_name.lower(): return False with enter_repo(repo): # Find READMES", "if FLAG_fork: fork_repo(repo) # Create the directories os.system(\"mkdir -p forks\") os.chdir(\"forks\") clone_repo(repo) #", "+ \"/{bot_name}/{repo_name}\".format(**repo) r = requests.delete(url,params=login_params) msg = \"Deleted bot repo {repo_name}, status {}\"", "= \"https://{bot_name}:{bot_password}@github.com/{bot_name}/{repo_name} {branch_name}:{branch_name}\" clone_url = \"https://github.com/orthographic-pedant/{repo_name}\" # Load the PR text with open(\"messages/pull_request.txt\")", "repo_name.lower(): return False with enter_repo(repo): # Find READMES F_README = [x for x", "{}\".format(repo[\"full_name\"])) if FLAG_fork: fork_repo(repo) # Create the directories os.system(\"mkdir -p forks\") os.chdir(\"forks\") clone_repo(repo)" ]
[ "# if sec > 59, set 59 (platform leap support) return dt_datetime(y, m,", "None: if _time[0] not in '+-': raise ValueError('Invalid RFC3339 string. Expected timezone.') negative", "= offset * -1 sign = '-' return '%04d-%02d-%02dT%02d:%02d:%02d.%06d%c%02d:%02d' % ( tm.tm_year, tm.tm_mon,", "date/time separator.' ) (date, _, _time) = rfc3339_string.partition('t') if not date or not", "_format_date_time(date_time): tm = date_time.timetuple() offset = 0 sign = '+' if date_time.tzinfo is", "RFC3339 compliant date-time string.''' if date_time and date_time.__class__ is not dt_datetime: raise ValueError(\"Expected", "+= c else: break if len(usec_buf) > 6: raise ValueError('Invalid RFC3339 string. Invalid", ") def to_rfc3339_string(date_time): '''Serialize date_time to RFC3339 compliant date-time string.''' if date_time and", "if _time[0] not in '+-': raise ValueError('Invalid RFC3339 string. Expected timezone.') negative =", "* -1 return dt_datetime( year, month, day, hour, minute, second, usec, TZFixedOffset(offset) )", "hour, minute, second, usec, TZFixedOffset(offset) ) def to_rfc3339_string(date_time): '''Serialize date_time to RFC3339 compliant", "RFC3339 string. Invalid timezone.') offset = (off_hour * 60) + off_minute if negative:", "import tzinfo, timedelta, datetime as dt_datetime from time import time, gmtime from math", "if self.offset < 0: sign = '-' return \"%s%d:%d\" % (sign, self.offset /", "(sign, self.offset / 60, self.offset % 60) def __repr__(self): return self.tzname() def _timestamp_to_date_time(timestamp,", "day, hour, minute, second, usec, TZFixedOffset(offset) ) def to_rfc3339_string(date_time): '''Serialize date_time to RFC3339", "_time[8] == 'z': offset = 0 if len(_time[9:]): raise ValueError( 'Invalid RFC3339 string.", "RFC3339 string. Remaining data after time zone.' ) else: _time = _time[8:] else:", "RFC3339 compliant date-time string.''' rfc3339_string = rfc3339_string.replace(' ', '').lower() if 't' not in", "= _time[:8].split(':') hour = int(hour) minute = int(minute) second = int(second) except ValueError:", "* (6 - len(usec_buf))) _time = _time[9 + len(usec_buf):] elif _time[8] == 'z':", "0 or _time[0] == 'z'): offset = 0 if len(_time[1:]): raise ValueError( 'Invalid", "time import time, gmtime from math import floor, ceil DATE_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%f' class", "TZFixedOffset(tzinfo): def __init__(self, offset): self.offset = offset def utcoffset(self, dt=None): return timedelta(seconds=self.offset *", "% 60) def __repr__(self): return self.tzname() def _timestamp_to_date_time(timestamp, tzinfo): t_full = timestamp +", "hh, mm, ss, us, tzinfo) def _format_date_time(date_time): tm = date_time.timetuple() offset = 0", "None if len(_time) > 8: if _time[8] == '.': usec_buf = '' for", "from time import time, gmtime from math import floor, ceil DATE_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'", "self.tzname() def _timestamp_to_date_time(timestamp, tzinfo): t_full = timestamp + (tzinfo.offset * 60) timestamp =", "by calling utcoffset() raise ValueError('Only TZFixedOffset supported.') return _timestamp_to_date_time(timestamp, tz) def from_utctimestamp(timestamp): '''timestamp", "if _time[8] == '.': usec_buf = '' for c in _time[9:]: if c", "None and (len(_time) == 0 or _time[0] == 'z'): offset = 0 if", "% ( tm.tm_year, tm.tm_mon, tm.tm_mday, tm.tm_hour, tm.tm_min, tm.tm_sec, date_time.microsecond, sign, offset / 60,", "dt_datetime(y, m, d, hh, mm, ss, us, tzinfo) def _format_date_time(date_time): tm = date_time.timetuple()", "ValueError('Invalid RFC3339 string. 
Invalid timezone.') offset = (off_hour * 60) + off_minute if", "= _get_local_utc_offset() local_timezone = TZFixedOffset(local_utc_offset) utc_timezone = TZFixedOffset(0) def utcnow(): '''datetime aware object", "(year, month, day) = date.split('-') year = int(year) month = int(month) day =", "= int(month) day = int(day) except ValueError: raise ValueError('Invalid RFC3339 string. Invalid date.')", "< 6: # ugly as shit, but good damn multiplication precision makes #", "except ValueError: raise ValueError('Invalid RFC3339 string. Invalid time.') usec = 0 offset =", "ugly as shit, but good damn multiplication precision makes # it a mess", "as shit, but good damn multiplication precision makes # it a mess usec", "Invalid timezone.') offset = (off_hour * 60) + off_minute if negative: offset =", "raise ValueError('Only TZFixedOffset supported.') return _timestamp_to_date_time(timestamp, tz) def from_utctimestamp(timestamp): '''timestamp -> UTC datetime", "RFC3339 string. Expected timezone.') negative = True if _time[0] == '-' else False", "len(usec_buf):] elif _time[8] == 'z': offset = 0 if len(_time[9:]): raise ValueError( 'Invalid", "offset = offset * -1 return dt_datetime( year, month, day, hour, minute, second,", "return _format_date_time(utcnow()) def now_to_string(): '''Local date and time RFC3339 compliant date-time string.''' return", "'+' if self.offset < 0: sign = '-' return \"%s%d:%d\" % (sign, self.offset", "_timestamp_to_date_time(timestamp, tzinfo): t_full = timestamp + (tzinfo.offset * 60) timestamp = int(floor(t_full)) frac", "support) return dt_datetime(y, m, d, hh, mm, ss, us, tzinfo) def _format_date_time(date_time): tm", "sign = '-' return \"%s%d:%d\" % (sign, self.offset / 60, self.offset % 60)", "return \"%s%d:%d\" % (sign, self.offset / 60, self.offset % 60) def __repr__(self): return", "compliant date-time string.''' if date_time and date_time.__class__ is not dt_datetime: raise ValueError(\"Expected a", "UTC date and time RFC3339 compliant date-time string.''' return _format_date_time(utcnow()) def now_to_string(): '''Local", "for c in _time[9:]: if c in '0123456789': usec_buf += c else: break", "= int(off_minute) except ValueError: raise ValueError('Invalid RFC3339 string. Invalid timezone.') offset = (off_hour", "0 sign = '+' if date_time.tzinfo is not None: if date_time.tzinfo.__class__ is not", "6: raise ValueError('Invalid RFC3339 string. 
Invalid fractions.') usec = int(usec_buf) if len(usec_buf) >", "if len(usec_buf) > 0 and len(usec_buf) < 6: # ugly as shit, but", "- len(usec_buf))) _time = _time[9 + len(usec_buf):] elif _time[8] == 'z': offset =", "def dst(self, dt=None): return timedelta(0) def tzname(self, dt=None): sign = '+' if self.offset", "timedelta(0) def tzname(self, dt=None): sign = '+' if self.offset < 0: sign =", "precision makes # it a mess usec = usec * int('1' + '0'", "hh, mm, ss, weekday, jday, dst = gmtime(timestamp) ss = min(ss, 59) #", "return ( dt_datetime.fromtimestamp(ts) - dt_datetime.utcfromtimestamp(ts) ).total_seconds() / 60 local_utc_offset = _get_local_utc_offset() local_timezone =", "dt=None): sign = '+' if self.offset < 0: sign = '-' return \"%s%d:%d\"", "60, self.offset % 60) def __repr__(self): return self.tzname() def _timestamp_to_date_time(timestamp, tzinfo): t_full =", "dt_datetime( year, month, day, hour, minute, second, usec, TZFixedOffset(offset) ) def to_rfc3339_string(date_time): '''Serialize", "current date and time.''' return _timestamp_to_date_time(time(), utc_timezone) def now(): '''datetime aware object in", "offset = offset * -1 sign = '-' return '%04d-%02d-%02dT%02d:%02d:%02d.%06d%c%02d:%02d' % ( tm.tm_year,", "_, _time) = rfc3339_string.partition('t') if not date or not _time: raise ValueError('Invalid RFC3339", "_time[8] == '.': usec_buf = '' for c in _time[9:]: if c in", "_time[1:].split(':') off_hour = int(off_hour) off_minute = int(off_minute) except ValueError: raise ValueError('Invalid RFC3339 string.", "with current date and time.''' return _timestamp_to_date_time(time(), local_timezone) def from_rfc3339_string(rfc3339_string): '''Parse RFC3339 compliant", "or _time[0] == 'z'): offset = 0 if len(_time[1:]): raise ValueError( 'Invalid RFC3339", "offset * -1 return dt_datetime( year, month, day, hour, minute, second, usec, TZFixedOffset(offset)", "rfc3339_string: raise ValueError( 'Invalid RFC3339 string. Missing \\'T\\' date/time separator.' ) (date, _,", "= 0 if offset is None and (len(_time) == 0 or _time[0] ==", "raise ValueError('Invalid RFC3339 string. Invalid time.') usec = 0 offset = None if", "'%04d-%02d-%02dT%02d:%02d:%02d.%06d%c%02d:%02d' % ( tm.tm_year, tm.tm_mon, tm.tm_mday, tm.tm_hour, tm.tm_min, tm.tm_sec, date_time.microsecond, sign, offset /", "offset = (off_hour * 60) + off_minute if negative: offset = offset *", "< 0: sign = '-' return \"%s%d:%d\" % (sign, self.offset / 60, self.offset", "# ugly as shit, but good damn multiplication precision makes # it a", "__init__(self, offset): self.offset = offset def utcoffset(self, dt=None): return timedelta(seconds=self.offset * 60) def", "dst(self, dt=None): return timedelta(0) def tzname(self, dt=None): sign = '+' if self.offset <", "'''Parse RFC3339 compliant date-time string.''' rfc3339_string = rfc3339_string.replace(' ', '').lower() if 't' not", "month = int(month) day = int(day) except ValueError: raise ValueError('Invalid RFC3339 string. 
Invalid", "utcnow_to_string(): '''Current UTC date and time RFC3339 compliant date-time string.''' return _format_date_time(utcnow()) def", "c in '0123456789': usec_buf += c else: break if len(usec_buf) > 6: raise", "to RFC3339 compliant date-time string.''' if date_time and date_time.__class__ is not dt_datetime: raise", "ValueError('Invalid RFC3339 string.') try: (year, month, day) = date.split('-') year = int(year) month", "= '-' return \"%s%d:%d\" % (sign, self.offset / 60, self.offset % 60) def", "tz] -> tz's local time from POSIX timestamp.''' if tz is None: tz", "self.offset / 60, self.offset % 60) def __repr__(self): return self.tzname() def _timestamp_to_date_time(timestamp, tzinfo):", "string. Invalid fractions.') usec = int(usec_buf) if len(usec_buf) > 0 and len(usec_buf) <", "if not date or not _time: raise ValueError('Invalid RFC3339 string.') try: (year, month,", "elif offset is None: if _time[0] not in '+-': raise ValueError('Invalid RFC3339 string.", "= '-' return '%04d-%02d-%02dT%02d:%02d:%02d.%06d%c%02d:%02d' % ( tm.tm_year, tm.tm_mon, tm.tm_mday, tm.tm_hour, tm.tm_min, tm.tm_sec, date_time.microsecond,", "timezone.') offset = (off_hour * 60) + off_minute if negative: offset = offset", "tm.tm_year, tm.tm_mon, tm.tm_mday, tm.tm_hour, tm.tm_min, tm.tm_sec, date_time.microsecond, sign, offset / 60, offset %", "ValueError('Invalid RFC3339 string. Expected timezone.') negative = True if _time[0] == '-' else", "and time.''' return _timestamp_to_date_time(time(), utc_timezone) def now(): '''datetime aware object in local timezone", "DATE_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%f' class TZFixedOffset(tzinfo): def __init__(self, offset): self.offset = offset def utcoffset(self,", "except ValueError: raise ValueError('Invalid RFC3339 string. Invalid date.') try: (hour, minute, second) =", "def utcoffset(self, dt=None): return timedelta(seconds=self.offset * 60) def dst(self, dt=None): return timedelta(0) def", "(tzinfo.offset * 60) timestamp = int(floor(t_full)) frac = (t_full - timestamp) * 1e6", "int(day) except ValueError: raise ValueError('Invalid RFC3339 string. Invalid date.') try: (hour, minute, second)", "usec * int('1' + '0' * (6 - len(usec_buf))) _time = _time[9 +", "day = int(day) except ValueError: raise ValueError('Invalid RFC3339 string. Invalid date.') try: (hour,", "time.time()).''' return _timestamp_to_date_time(timestamp, utc_timezone) def utcnow_to_string(): '''Current UTC date and time RFC3339 compliant", "timezone.') negative = True if _time[0] == '-' else False try: (off_hour, off_minute)", "def from_timestamp(timestamp, tz=None): '''timestamp[, tz] -> tz's local time from POSIX timestamp.''' if", "\\'T\\' date/time separator.' 
) (date, _, _time) = rfc3339_string.partition('t') if not date or", "time RFC3339 compliant date-time string.''' return _format_date_time(utcnow()) def now_to_string(): '''Local date and time", "60) timestamp = int(floor(t_full)) frac = (t_full - timestamp) * 1e6 us =", "+ 0.5) if frac >= 0.0 else ceil(frac - 0.5)) if us ==", "m, d, hh, mm, ss, us, tzinfo) def _format_date_time(date_time): tm = date_time.timetuple() offset", "and time.''' return _timestamp_to_date_time(time(), local_timezone) def from_rfc3339_string(rfc3339_string): '''Parse RFC3339 compliant date-time string.''' rfc3339_string", "self.offset = offset def utcoffset(self, dt=None): return timedelta(seconds=self.offset * 60) def dst(self, dt=None):", "now(): '''datetime aware object in local timezone with current date and time.''' return", "60) def dst(self, dt=None): return timedelta(0) def tzname(self, dt=None): sign = '+' if", "d, hh, mm, ss, us, tzinfo) def _format_date_time(date_time): tm = date_time.timetuple() offset =", "date_time.tzinfo.offset if offset < 0: offset = offset * -1 sign = '-'", "_time[:8].split(':') hour = int(hour) minute = int(minute) second = int(second) except ValueError: raise", "from_rfc3339_string(rfc3339_string): '''Parse RFC3339 compliant date-time string.''' rfc3339_string = rfc3339_string.replace(' ', '').lower() if 't'", "in '+-': raise ValueError('Invalid RFC3339 string. Expected timezone.') negative = True if _time[0]", "'''datetime aware object in UTC with current date and time.''' return _timestamp_to_date_time(time(), utc_timezone)", "if offset is None and (len(_time) == 0 or _time[0] == 'z'): offset", "<reponame>kashnick/udatetime from datetime import tzinfo, timedelta, datetime as dt_datetime from time import time,", "import floor, ceil DATE_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%f' class TZFixedOffset(tzinfo): def __init__(self, offset): self.offset =", "RFC3339 string. Missing \\'T\\' date/time separator.' ) (date, _, _time) = rfc3339_string.partition('t') if", "tz = local_timezone elif tz.__class__ is not TZFixedOffset: # TODO: Support all tzinfo", "ValueError('Invalid RFC3339 string. Invalid time.') usec = 0 offset = None if len(_time)", "len(_time) > 8: if _time[8] == '.': usec_buf = '' for c in", "= int(hour) minute = int(minute) second = int(second) except ValueError: raise ValueError('Invalid RFC3339", "= int(year) month = int(month) day = int(day) except ValueError: raise ValueError('Invalid RFC3339", "usec = usec * int('1' + '0' * (6 - len(usec_buf))) _time =", "0 if len(_time[9:]): raise ValueError( 'Invalid RFC3339 string. Remaining data after time zone.'", "else False try: (off_hour, off_minute) = _time[1:].split(':') off_hour = int(off_hour) off_minute = int(off_minute)", "= '+' if date_time.tzinfo is not None: if date_time.tzinfo.__class__ is not TZFixedOffset: #", "time.''' return _timestamp_to_date_time(time(), utc_timezone) def now(): '''datetime aware object in local timezone with", "m, d, hh, mm, ss, weekday, jday, dst = gmtime(timestamp) ss = min(ss,", "ValueError(\"Expected a datetime object.\") return _format_date_time(date_time) def from_timestamp(timestamp, tz=None): '''timestamp[, tz] -> tz's", "from_timestamp(timestamp, tz=None): '''timestamp[, tz] -> tz's local time from POSIX timestamp.''' if tz", "= 0 sign = '+' if date_time.tzinfo is not None: if date_time.tzinfo.__class__ is", "separator.' 
) (date, _, _time) = rfc3339_string.partition('t') if not date or not _time:", "tzinfo, timedelta, datetime as dt_datetime from time import time, gmtime from math import", "> 59, set 59 (platform leap support) return dt_datetime(y, m, d, hh, mm,", "offset / 60, offset % 60 ) def _get_local_utc_offset(): ts = time() return", "not dt_datetime: raise ValueError(\"Expected a datetime object.\") return _format_date_time(date_time) def from_timestamp(timestamp, tz=None): '''timestamp[,", "/ 60, offset % 60 ) def _get_local_utc_offset(): ts = time() return (", "second) = _time[:8].split(':') hour = int(hour) minute = int(minute) second = int(second) except", "tm = date_time.timetuple() offset = 0 sign = '+' if date_time.tzinfo is not", "= date_time.tzinfo.offset if offset < 0: offset = offset * -1 sign =", "from math import floor, ceil DATE_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%f' class TZFixedOffset(tzinfo): def __init__(self, offset):", "compliant date-time string.''' rfc3339_string = rfc3339_string.replace(' ', '').lower() if 't' not in rfc3339_string:", "_format_date_time(utcnow()) def now_to_string(): '''Local date and time RFC3339 compliant date-time string.''' return _format_date_time(now())", "sign, offset / 60, offset % 60 ) def _get_local_utc_offset(): ts = time()", "date and time.''' return _timestamp_to_date_time(time(), local_timezone) def from_rfc3339_string(rfc3339_string): '''Parse RFC3339 compliant date-time string.'''", "elif tz.__class__ is not TZFixedOffset: # TODO: Support all tzinfo subclasses by calling", "raise ValueError( 'Invalid RFC3339 string. Remaining data after time zone.' ) else: _time", "utc_timezone) def utcnow_to_string(): '''Current UTC date and time RFC3339 compliant date-time string.''' return", "if sec > 59, set 59 (platform leap support) return dt_datetime(y, m, d,", "ValueError( 'Invalid RFC3339 string. Missing \\'T\\' date/time separator.' 
) (date, _, _time) =", "try: (hour, minute, second) = _time[:8].split(':') hour = int(hour) minute = int(minute) second", "offset = 0 if offset is None and (len(_time) == 0 or _time[0]", "utc_timezone) def now(): '''datetime aware object in local timezone with current date and", "date_time.__class__ is not dt_datetime: raise ValueError(\"Expected a datetime object.\") return _format_date_time(date_time) def from_timestamp(timestamp,", "dt=None): return timedelta(seconds=self.offset * 60) def dst(self, dt=None): return timedelta(0) def tzname(self, dt=None):", "rfc3339_string.partition('t') if not date or not _time: raise ValueError('Invalid RFC3339 string.') try: (year,", "but good damn multiplication precision makes # it a mess usec = usec", "if tz is None: tz = local_timezone elif tz.__class__ is not TZFixedOffset: #", "_time[0] == '-' else False try: (off_hour, off_minute) = _time[1:].split(':') off_hour = int(off_hour)", "/ 60 local_utc_offset = _get_local_utc_offset() local_timezone = TZFixedOffset(local_utc_offset) utc_timezone = TZFixedOffset(0) def utcnow():", "to_rfc3339_string(date_time): '''Serialize date_time to RFC3339 compliant date-time string.''' if date_time and date_time.__class__ is", "return dt_datetime( year, month, day, hour, minute, second, usec, TZFixedOffset(offset) ) def to_rfc3339_string(date_time):", "= _time[9 + len(usec_buf):] elif _time[8] == 'z': offset = 0 if len(_time[9:]):", "month, day, hour, minute, second, usec, TZFixedOffset(offset) ) def to_rfc3339_string(date_time): '''Serialize date_time to", "def from_utctimestamp(timestamp): '''timestamp -> UTC datetime from a POSIX timestamp (like time.time()).''' return", "offset = 0 sign = '+' if date_time.tzinfo is not None: if date_time.tzinfo.__class__", "'+' if date_time.tzinfo is not None: if date_time.tzinfo.__class__ is not TZFixedOffset: # TODO:", "RFC3339 compliant date-time string.''' return _format_date_time(utcnow()) def now_to_string(): '''Local date and time RFC3339", "date and time.''' return _timestamp_to_date_time(time(), utc_timezone) def now(): '''datetime aware object in local", "if date_time.tzinfo.__class__ is not TZFixedOffset: # TODO: Support all tzinfo subclasses by calling", "'0123456789': usec_buf += c else: break if len(usec_buf) > 6: raise ValueError('Invalid RFC3339", "timedelta, datetime as dt_datetime from time import time, gmtime from math import floor,", "return timedelta(0) def tzname(self, dt=None): sign = '+' if self.offset < 0: sign", "ValueError: raise ValueError('Invalid RFC3339 string. Invalid time.') usec = 0 offset = None", "local_timezone) def from_rfc3339_string(rfc3339_string): '''Parse RFC3339 compliant date-time string.''' rfc3339_string = rfc3339_string.replace(' ', '').lower()", "'+-': raise ValueError('Invalid RFC3339 string. Expected timezone.') negative = True if _time[0] ==", "raise ValueError('Invalid RFC3339 string. Invalid fractions.') usec = int(usec_buf) if len(usec_buf) > 0", "raise ValueError('Invalid RFC3339 string. 
Invalid timezone.') offset = (off_hour * 60) + off_minute", "= offset * -1 return dt_datetime( year, month, day, hour, minute, second, usec,", "+ '0' * (6 - len(usec_buf))) _time = _time[9 + len(usec_buf):] elif _time[8]", "is not None: if date_time.tzinfo.__class__ is not TZFixedOffset: # TODO: Support all tzinfo", "sign = '+' if date_time.tzinfo is not None: if date_time.tzinfo.__class__ is not TZFixedOffset:", "off_minute) = _time[1:].split(':') off_hour = int(off_hour) off_minute = int(off_minute) except ValueError: raise ValueError('Invalid", "'z': offset = 0 if len(_time[9:]): raise ValueError( 'Invalid RFC3339 string. Remaining data", "is None and (len(_time) == 0 or _time[0] == 'z'): offset = 0", "'0' * (6 - len(usec_buf))) _time = _time[9 + len(usec_buf):] elif _time[8] ==", "time from POSIX timestamp.''' if tz is None: tz = local_timezone elif tz.__class__", "in rfc3339_string: raise ValueError( 'Invalid RFC3339 string. Missing \\'T\\' date/time separator.' ) (date,", "subclasses by calling utcoffset() raise ValueError('Only TZFixedOffset supported.') offset = date_time.tzinfo.offset if offset", "TZFixedOffset(local_utc_offset) utc_timezone = TZFixedOffset(0) def utcnow(): '''datetime aware object in UTC with current", "in '0123456789': usec_buf += c else: break if len(usec_buf) > 6: raise ValueError('Invalid", "date_time and date_time.__class__ is not dt_datetime: raise ValueError(\"Expected a datetime object.\") return _format_date_time(date_time)", "'.': usec_buf = '' for c in _time[9:]: if c in '0123456789': usec_buf", "_time[9 + len(usec_buf):] elif _time[8] == 'z': offset = 0 if len(_time[9:]): raise", "not in '+-': raise ValueError('Invalid RFC3339 string. Expected timezone.') negative = True if", "= (t_full - timestamp) * 1e6 us = int(floor(frac + 0.5) if frac", "def now(): '''datetime aware object in local timezone with current date and time.'''", "def _get_local_utc_offset(): ts = time() return ( dt_datetime.fromtimestamp(ts) - dt_datetime.utcfromtimestamp(ts) ).total_seconds() / 60", "tz's local time from POSIX timestamp.''' if tz is None: tz = local_timezone", "if len(_time) > 8: if _time[8] == '.': usec_buf = '' for c", "if us == 1e6: timestamp += 1 us = 0 y, m, d,", "RFC3339 string. Remaining data after time zone.' 
) elif offset is None: if", "(off_hour * 60) + off_minute if negative: offset = offset * -1 return", "date.') try: (hour, minute, second) = _time[:8].split(':') hour = int(hour) minute = int(minute)", "a mess usec = usec * int('1' + '0' * (6 - len(usec_buf)))", "usec, TZFixedOffset(offset) ) def to_rfc3339_string(date_time): '''Serialize date_time to RFC3339 compliant date-time string.''' if", "in local timezone with current date and time.''' return _timestamp_to_date_time(time(), local_timezone) def from_rfc3339_string(rfc3339_string):", "offset): self.offset = offset def utcoffset(self, dt=None): return timedelta(seconds=self.offset * 60) def dst(self,", "object in local timezone with current date and time.''' return _timestamp_to_date_time(time(), local_timezone) def", "a datetime object.\") return _format_date_time(date_time) def from_timestamp(timestamp, tz=None): '''timestamp[, tz] -> tz's local", "minute, second) = _time[:8].split(':') hour = int(hour) minute = int(minute) second = int(second)", "string.''' rfc3339_string = rfc3339_string.replace(' ', '').lower() if 't' not in rfc3339_string: raise ValueError(", "us == 1e6: timestamp += 1 us = 0 y, m, d, hh,", "0 if offset is None and (len(_time) == 0 or _time[0] == 'z'):", "as dt_datetime from time import time, gmtime from math import floor, ceil DATE_TIME_FORMAT", "dt_datetime.utcfromtimestamp(ts) ).total_seconds() / 60 local_utc_offset = _get_local_utc_offset() local_timezone = TZFixedOffset(local_utc_offset) utc_timezone = TZFixedOffset(0)", "+ len(usec_buf):] elif _time[8] == 'z': offset = 0 if len(_time[9:]): raise ValueError(", "if offset < 0: offset = offset * -1 sign = '-' return", "_time) = rfc3339_string.partition('t') if not date or not _time: raise ValueError('Invalid RFC3339 string.')", "= int(minute) second = int(second) except ValueError: raise ValueError('Invalid RFC3339 string. Invalid time.')", "int(off_hour) off_minute = int(off_minute) except ValueError: raise ValueError('Invalid RFC3339 string. Invalid timezone.') offset", "in _time[9:]: if c in '0123456789': usec_buf += c else: break if len(usec_buf)", "utc_timezone = TZFixedOffset(0) def utcnow(): '''datetime aware object in UTC with current date", "return '%04d-%02d-%02dT%02d:%02d:%02d.%06d%c%02d:%02d' % ( tm.tm_year, tm.tm_mon, tm.tm_mday, tm.tm_hour, tm.tm_min, tm.tm_sec, date_time.microsecond, sign, offset", ") def _get_local_utc_offset(): ts = time() return ( dt_datetime.fromtimestamp(ts) - dt_datetime.utcfromtimestamp(ts) ).total_seconds() /", "return _timestamp_to_date_time(time(), utc_timezone) def now(): '''datetime aware object in local timezone with current", "= '%Y-%m-%dT%H:%M:%S.%f' class TZFixedOffset(tzinfo): def __init__(self, offset): self.offset = offset def utcoffset(self, dt=None):", "off_minute if negative: offset = offset * -1 return dt_datetime( year, month, day,", "'z'): offset = 0 if len(_time[1:]): raise ValueError( 'Invalid RFC3339 string. Remaining data", "sec > 59, set 59 (platform leap support) return dt_datetime(y, m, d, hh,", "0 offset = None if len(_time) > 8: if _time[8] == '.': usec_buf", "UTC datetime from a POSIX timestamp (like time.time()).''' return _timestamp_to_date_time(timestamp, utc_timezone) def utcnow_to_string():", "if len(_time[9:]): raise ValueError( 'Invalid RFC3339 string. Remaining data after time zone.' 
)", "is None: tz = local_timezone elif tz.__class__ is not TZFixedOffset: # TODO: Support", "elif _time[8] == 'z': offset = 0 if len(_time[9:]): raise ValueError( 'Invalid RFC3339", "ValueError('Invalid RFC3339 string. Invalid date.') try: (hour, minute, second) = _time[:8].split(':') hour =", "None: if date_time.tzinfo.__class__ is not TZFixedOffset: # TODO: Support all tzinfo subclasses by", "offset def utcoffset(self, dt=None): return timedelta(seconds=self.offset * 60) def dst(self, dt=None): return timedelta(0)", "negative = True if _time[0] == '-' else False try: (off_hour, off_minute) =", "raise ValueError('Invalid RFC3339 string.') try: (year, month, day) = date.split('-') year = int(year)", "utcoffset(self, dt=None): return timedelta(seconds=self.offset * 60) def dst(self, dt=None): return timedelta(0) def tzname(self,", "shit, but good damn multiplication precision makes # it a mess usec =", "not _time: raise ValueError('Invalid RFC3339 string.') try: (year, month, day) = date.split('-') year", "_get_local_utc_offset() local_timezone = TZFixedOffset(local_utc_offset) utc_timezone = TZFixedOffset(0) def utcnow(): '''datetime aware object in", "class TZFixedOffset(tzinfo): def __init__(self, offset): self.offset = offset def utcoffset(self, dt=None): return timedelta(seconds=self.offset", "not None: if date_time.tzinfo.__class__ is not TZFixedOffset: # TODO: Support all tzinfo subclasses", "try: (year, month, day) = date.split('-') year = int(year) month = int(month) day", "minute, second, usec, TZFixedOffset(offset) ) def to_rfc3339_string(date_time): '''Serialize date_time to RFC3339 compliant date-time", "= 0 offset = None if len(_time) > 8: if _time[8] == '.':", "import time, gmtime from math import floor, ceil DATE_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%f' class TZFixedOffset(tzinfo):", "multiplication precision makes # it a mess usec = usec * int('1' +", "date_time to RFC3339 compliant date-time string.''' if date_time and date_time.__class__ is not dt_datetime:", "+ (tzinfo.offset * 60) timestamp = int(floor(t_full)) frac = (t_full - timestamp) *", "_timestamp_to_date_time(timestamp, utc_timezone) def utcnow_to_string(): '''Current UTC date and time RFC3339 compliant date-time string.'''", "not TZFixedOffset: # TODO: Support all tzinfo subclasses by calling utcoffset() raise ValueError('Only", "60) def __repr__(self): return self.tzname() def _timestamp_to_date_time(timestamp, tzinfo): t_full = timestamp + (tzinfo.offset", "ceil(frac - 0.5)) if us == 1e6: timestamp += 1 us = 0", "True if _time[0] == '-' else False try: (off_hour, off_minute) = _time[1:].split(':') off_hour", "_time[0] == 'z'): offset = 0 if len(_time[1:]): raise ValueError( 'Invalid RFC3339 string.", "-> tz's local time from POSIX timestamp.''' if tz is None: tz =", "self.offset < 0: sign = '-' return \"%s%d:%d\" % (sign, self.offset / 60,", "_timestamp_to_date_time(time(), utc_timezone) def now(): '''datetime aware object in local timezone with current date", "== '.': usec_buf = '' for c in _time[9:]: if c in '0123456789':", "= offset def utcoffset(self, dt=None): return timedelta(seconds=self.offset * 60) def dst(self, dt=None): return", "makes # it a mess usec = usec * int('1' + '0' *", "int(minute) second = int(second) except ValueError: raise ValueError('Invalid RFC3339 string. Invalid time.') usec", "(len(_time) == 0 or _time[0] == 'z'): offset = 0 if len(_time[1:]): raise", "time zone.' 
) else: _time = _time[8:] else: offset = 0 if offset", "POSIX timestamp (like time.time()).''' return _timestamp_to_date_time(timestamp, utc_timezone) def utcnow_to_string(): '''Current UTC date and", "int(floor(frac + 0.5) if frac >= 0.0 else ceil(frac - 0.5)) if us", "if len(usec_buf) > 6: raise ValueError('Invalid RFC3339 string. Invalid fractions.') usec = int(usec_buf)", "TODO: Support all tzinfo subclasses by calling utcoffset() raise ValueError('Only TZFixedOffset supported.') offset", "-1 sign = '-' return '%04d-%02d-%02dT%02d:%02d:%02d.%06d%c%02d:%02d' % ( tm.tm_year, tm.tm_mon, tm.tm_mday, tm.tm_hour, tm.tm_min,", "'Invalid RFC3339 string. Missing \\'T\\' date/time separator.' ) (date, _, _time) = rfc3339_string.partition('t')", "len(usec_buf))) _time = _time[9 + len(usec_buf):] elif _time[8] == 'z': offset = 0", "is None: if _time[0] not in '+-': raise ValueError('Invalid RFC3339 string. Expected timezone.')", "False try: (off_hour, off_minute) = _time[1:].split(':') off_hour = int(off_hour) off_minute = int(off_minute) except", "'''Current UTC date and time RFC3339 compliant date-time string.''' return _format_date_time(utcnow()) def now_to_string():", "aware object in local timezone with current date and time.''' return _timestamp_to_date_time(time(), local_timezone)", "if date_time and date_time.__class__ is not dt_datetime: raise ValueError(\"Expected a datetime object.\") return", "% 60 ) def _get_local_utc_offset(): ts = time() return ( dt_datetime.fromtimestamp(ts) - dt_datetime.utcfromtimestamp(ts)", "mm, ss, us, tzinfo) def _format_date_time(date_time): tm = date_time.timetuple() offset = 0 sign", "= int(second) except ValueError: raise ValueError('Invalid RFC3339 string. Invalid time.') usec = 0", "return self.tzname() def _timestamp_to_date_time(timestamp, tzinfo): t_full = timestamp + (tzinfo.offset * 60) timestamp", "year = int(year) month = int(month) day = int(day) except ValueError: raise ValueError('Invalid", "us = int(floor(frac + 0.5) if frac >= 0.0 else ceil(frac - 0.5))", "* 1e6 us = int(floor(frac + 0.5) if frac >= 0.0 else ceil(frac", "ts = time() return ( dt_datetime.fromtimestamp(ts) - dt_datetime.utcfromtimestamp(ts) ).total_seconds() / 60 local_utc_offset =", "ss = min(ss, 59) # if sec > 59, set 59 (platform leap", "minute = int(minute) second = int(second) except ValueError: raise ValueError('Invalid RFC3339 string. Invalid", "from datetime import tzinfo, timedelta, datetime as dt_datetime from time import time, gmtime", "string. Invalid timezone.') offset = (off_hour * 60) + off_minute if negative: offset", "date-time string.''' if date_time and date_time.__class__ is not dt_datetime: raise ValueError(\"Expected a datetime", "else: offset = 0 if offset is None and (len(_time) == 0 or", "Support all tzinfo subclasses by calling utcoffset() raise ValueError('Only TZFixedOffset supported.') return _timestamp_to_date_time(timestamp,", "(t_full - timestamp) * 1e6 us = int(floor(frac + 0.5) if frac >=", "Remaining data after time zone.' ) else: _time = _time[8:] else: offset =", "from a POSIX timestamp (like time.time()).''' return _timestamp_to_date_time(timestamp, utc_timezone) def utcnow_to_string(): '''Current UTC", "* -1 sign = '-' return '%04d-%02d-%02dT%02d:%02d:%02d.%06d%c%02d:%02d' % ( tm.tm_year, tm.tm_mon, tm.tm_mday, tm.tm_hour,", "len(usec_buf) > 6: raise ValueError('Invalid RFC3339 string. 
Invalid fractions.') usec = int(usec_buf) if", "ss, us, tzinfo) def _format_date_time(date_time): tm = date_time.timetuple() offset = 0 sign =", ").total_seconds() / 60 local_utc_offset = _get_local_utc_offset() local_timezone = TZFixedOffset(local_utc_offset) utc_timezone = TZFixedOffset(0) def", "after time zone.' ) elif offset is None: if _time[0] not in '+-':", "/ 60, self.offset % 60) def __repr__(self): return self.tzname() def _timestamp_to_date_time(timestamp, tzinfo): t_full", "= int(floor(frac + 0.5) if frac >= 0.0 else ceil(frac - 0.5)) if", "== '-' else False try: (off_hour, off_minute) = _time[1:].split(':') off_hour = int(off_hour) off_minute", "def __init__(self, offset): self.offset = offset def utcoffset(self, dt=None): return timedelta(seconds=self.offset * 60)", "(off_hour, off_minute) = _time[1:].split(':') off_hour = int(off_hour) off_minute = int(off_minute) except ValueError: raise", "subclasses by calling utcoffset() raise ValueError('Only TZFixedOffset supported.') return _timestamp_to_date_time(timestamp, tz) def from_utctimestamp(timestamp):", "ValueError( 'Invalid RFC3339 string. Remaining data after time zone.' ) else: _time =", "60) + off_minute if negative: offset = offset * -1 return dt_datetime( year,", "'Invalid RFC3339 string. Remaining data after time zone.' ) elif offset is None:", "timestamp) * 1e6 us = int(floor(frac + 0.5) if frac >= 0.0 else", "month, day) = date.split('-') year = int(year) month = int(month) day = int(day)", "= (off_hour * 60) + off_minute if negative: offset = offset * -1", "timestamp += 1 us = 0 y, m, d, hh, mm, ss, weekday,", "gmtime from math import floor, ceil DATE_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%f' class TZFixedOffset(tzinfo): def __init__(self,", "( tm.tm_year, tm.tm_mon, tm.tm_mday, tm.tm_hour, tm.tm_min, tm.tm_sec, date_time.microsecond, sign, offset / 60, offset", "tm.tm_hour, tm.tm_min, tm.tm_sec, date_time.microsecond, sign, offset / 60, offset % 60 ) def", "'%Y-%m-%dT%H:%M:%S.%f' class TZFixedOffset(tzinfo): def __init__(self, offset): self.offset = offset def utcoffset(self, dt=None): return", "def utcnow_to_string(): '''Current UTC date and time RFC3339 compliant date-time string.''' return _format_date_time(utcnow())", "None: tz = local_timezone elif tz.__class__ is not TZFixedOffset: # TODO: Support all", "d, hh, mm, ss, weekday, jday, dst = gmtime(timestamp) ss = min(ss, 59)", "0.5) if frac >= 0.0 else ceil(frac - 0.5)) if us == 1e6:", "def utcnow(): '''datetime aware object in UTC with current date and time.''' return", "timestamp = int(floor(t_full)) frac = (t_full - timestamp) * 1e6 us = int(floor(frac", "= int(day) except ValueError: raise ValueError('Invalid RFC3339 string. Invalid date.') try: (hour, minute,", "local_timezone elif tz.__class__ is not TZFixedOffset: # TODO: Support all tzinfo subclasses by", "string. Remaining data after time zone.' ) else: _time = _time[8:] else: offset", "all tzinfo subclasses by calling utcoffset() raise ValueError('Only TZFixedOffset supported.') return _timestamp_to_date_time(timestamp, tz)", "- dt_datetime.utcfromtimestamp(ts) ).total_seconds() / 60 local_utc_offset = _get_local_utc_offset() local_timezone = TZFixedOffset(local_utc_offset) utc_timezone =", "not in rfc3339_string: raise ValueError( 'Invalid RFC3339 string. Missing \\'T\\' date/time separator.' 
from datetime import tzinfo, timedelta, datetime as dt_datetime
from time import time, gmtime
from math import floor, ceil

DATE_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'


class TZFixedOffset(tzinfo):
    '''Fixed-offset timezone; offset is given in minutes east of UTC.'''

    def __init__(self, offset):
        self.offset = offset

    def utcoffset(self, dt=None):
        return timedelta(seconds=self.offset * 60)

    def dst(self, dt=None):
        return timedelta(0)

    def tzname(self, dt=None):
        # Normalize the sign first so negative offsets render as e.g. '-4:0'
        # rather than with a doubled minus sign.
        offset = self.offset
        sign = '+'
        if offset < 0:
            sign = '-'
            offset = -offset
        return '%s%d:%d' % (sign, offset / 60, offset % 60)

    def __repr__(self):
        return self.tzname()


def _timestamp_to_date_time(timestamp, tzinfo):
    t_full = timestamp + (tzinfo.offset * 60)
    timestamp = int(floor(t_full))
    frac = (t_full - timestamp) * 1e6
    us = int(floor(frac + 0.5) if frac >= 0.0 else ceil(frac - 0.5))

    if us == 1e6:
        timestamp += 1
        us = 0

    y, m, d, hh, mm, ss, weekday, jday, dst = gmtime(timestamp)
    ss = min(ss, 59)  # if sec > 59, set 59 (platform leap support)
    return dt_datetime(y, m, d, hh, mm, ss, us, tzinfo)


def _format_date_time(date_time):
    tm = date_time.timetuple()
    offset = 0
    sign = '+'

    if date_time.tzinfo is not None:
        if date_time.tzinfo.__class__ is not TZFixedOffset:
            # TODO: Support all tzinfo subclasses by calling utcoffset()
            raise ValueError('Only TZFixedOffset supported.')
        offset = date_time.tzinfo.offset
        if offset < 0:
            offset = offset * -1
            sign = '-'

    return '%04d-%02d-%02dT%02d:%02d:%02d.%06d%c%02d:%02d' % (
        tm.tm_year, tm.tm_mon, tm.tm_mday, tm.tm_hour, tm.tm_min, tm.tm_sec,
        date_time.microsecond, sign, offset / 60, offset % 60
    )


def _get_local_utc_offset():
    ts = time()
    return (
        dt_datetime.fromtimestamp(ts) - dt_datetime.utcfromtimestamp(ts)
    ).total_seconds() / 60


local_utc_offset = _get_local_utc_offset()
local_timezone = TZFixedOffset(local_utc_offset)
utc_timezone = TZFixedOffset(0)


def utcnow():
    '''datetime aware object in UTC with current date and time.'''
    return _timestamp_to_date_time(time(), utc_timezone)


def now():
    '''datetime aware object in local timezone with current date and time.'''
    return _timestamp_to_date_time(time(), local_timezone)


def from_rfc3339_string(rfc3339_string):
    '''Parse RFC3339 compliant date-time string.'''
    rfc3339_string = rfc3339_string.replace(' ', '').lower()

    if 't' not in rfc3339_string:
        raise ValueError(
            'Invalid RFC3339 string. Missing \'T\' date/time separator.'
        )

    (date, _, _time) = rfc3339_string.partition('t')
    if not date or not _time:
        raise ValueError('Invalid RFC3339 string.')

    try:
        (year, month, day) = date.split('-')
        year = int(year)
        month = int(month)
        day = int(day)
    except ValueError:
        raise ValueError('Invalid RFC3339 string. Invalid date.')

    try:
        (hour, minute, second) = _time[:8].split(':')
        hour = int(hour)
        minute = int(minute)
        second = int(second)
    except ValueError:
        raise ValueError('Invalid RFC3339 string. Invalid time.')

    usec = 0
    offset = None

    if len(_time) > 8:
        if _time[8] == '.':
            usec_buf = ''
            for c in _time[9:]:
                if c in '0123456789':
                    usec_buf += c
                else:
                    break
            if len(usec_buf) > 6:
                raise ValueError('Invalid RFC3339 string. Invalid fractions.')
            usec = int(usec_buf)
            if len(usec_buf) > 0 and len(usec_buf) < 6:
                # Scale the parsed fraction up to microseconds
                # (e.g. '5' -> 500000).
                usec = usec * int('1' + '0' * (6 - len(usec_buf)))
            _time = _time[9 + len(usec_buf):]
        elif _time[8] == 'z':
            offset = 0
            if len(_time[9:]):
                raise ValueError(
                    'Invalid RFC3339 string. Remaining data after time zone.'
                )
        else:
            _time = _time[8:]
    else:
        offset = 0

    if offset is None and (len(_time) == 0 or _time[0] == 'z'):
        offset = 0
        if len(_time[1:]):
            raise ValueError(
                'Invalid RFC3339 string. Remaining data after time zone.'
            )
    elif offset is None:
        if _time[0] not in '+-':
            raise ValueError('Invalid RFC3339 string. Expected timezone.')
        negative = True if _time[0] == '-' else False
        try:
            (off_hour, off_minute) = _time[1:].split(':')
            off_hour = int(off_hour)
            off_minute = int(off_minute)
        except ValueError:
            raise ValueError('Invalid RFC3339 string. Invalid timezone.')
        offset = (off_hour * 60) + off_minute
        if negative:
            offset = offset * -1

    return dt_datetime(
        year, month, day, hour, minute, second, usec, TZFixedOffset(offset)
    )


def to_rfc3339_string(date_time):
    '''Serialize date_time to RFC3339 compliant date-time string.'''
    if date_time and date_time.__class__ is not dt_datetime:
        raise ValueError("Expected a datetime object.")
    return _format_date_time(date_time)


def from_timestamp(timestamp, tz=None):
    '''timestamp[, tz] -> tz's local time from POSIX timestamp.'''
    if tz is None:
        tz = local_timezone
    elif tz.__class__ is not TZFixedOffset:
        # TODO: Support all tzinfo subclasses by calling utcoffset()
        raise ValueError('Only TZFixedOffset supported.')
    return _timestamp_to_date_time(timestamp, tz)


def from_utctimestamp(timestamp):
    '''timestamp -> UTC datetime from a POSIX timestamp (like time.time()).'''
    return _timestamp_to_date_time(timestamp, utc_timezone)


def utcnow_to_string():
    '''Current UTC date and time RFC3339 compliant date-time string.'''
    return _format_date_time(utcnow())


def now_to_string():
    '''Local date and time RFC3339 compliant date-time string.'''
    return _format_date_time(now())
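A minimal usage sketch of the helpers above, not part of the original module: it round-trips an RFC3339 string through from_rfc3339_string and to_rfc3339_string, prints the current time with utcnow_to_string / now_to_string, and converts a POSIX timestamp. The module name `rfc3339` and the sample timestamp string are assumptions for illustration only.

# Usage sketch (illustrative; assumes the module above is importable as `rfc3339`).
import rfc3339

# Parse an RFC3339 string with a numeric UTC offset into an aware datetime.
dt = rfc3339.from_rfc3339_string('2020-03-19T16:59:30.500000-04:00')
print(dt.year, dt.month, dt.tzinfo.offset)   # 2020 3 -240 (minutes east of UTC)

# Serialize it back; only TZFixedOffset-aware (or naive) datetimes are accepted.
print(rfc3339.to_rfc3339_string(dt))         # 2020-03-19T16:59:30.500000-04:00

# Current time as RFC3339 strings, in UTC and in the local fixed offset.
print(rfc3339.utcnow_to_string())
print(rfc3339.now_to_string())

# Convert a POSIX timestamp to an aware UTC datetime.
print(rfc3339.from_utctimestamp(0))          # 1970-01-01 00:00:00+00:00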