hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
e43b2414ce43533302d4bb35c354bca0325dcc5d
414
py
Python
tdrs-backend/tdpservice/users/migrations/0011_auto_20210108_1741.py
amilash/TANF-app
09ef2baa854e41250c5761fa66aa665e112a53cf
[ "CC0-1.0" ]
18
2020-03-25T19:57:12.000Z
2021-07-26T15:37:50.000Z
tdrs-backend/tdpservice/users/migrations/0011_auto_20210108_1741.py
amilash/TANF-app
09ef2baa854e41250c5761fa66aa665e112a53cf
[ "CC0-1.0" ]
1,465
2020-07-22T21:16:53.000Z
2022-03-31T16:04:22.000Z
tdrs-backend/tdpservice/users/migrations/0011_auto_20210108_1741.py
amilash/TANF-app
09ef2baa854e41250c5761fa66aa665e112a53cf
[ "CC0-1.0" ]
15
2020-07-22T14:58:37.000Z
2021-06-22T17:29:55.000Z
# Generated by Django 3.1.4 on 2021-01-08 17:41 from django.contrib.auth.models import Group from django.db import migrations def remove_groups(apps, schema_editor): Group.objects.filter(name__in={"OFA Analyst"}).delete() class Migration(migrations.Migration): dependencies = [ ('users', '0010_remove_user_roles'), ] operations = [ migrations.RunPython(remove_groups), ]
20.7
59
0.698068
53
414
5.301887
0.792453
0.071174
0
0
0
0
0
0
0
0
0
0.05638
0.18599
414
19
60
21.789474
0.777448
0.108696
0
0
1
0
0.103542
0.059946
0
0
0
0
0
1
0.090909
false
0
0.181818
0
0.545455
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
e44d36dda3c2c8efcc264a83da19035b1b0cf1e4
441
py
Python
src/Server/DatabaseSystem/Tools/delWeeklyData.py
Sniper970119/MemoryAssistInPython
434efd09edbda7919a3f754374add7f34912fab7
[ "MIT" ]
19
2019-02-11T01:39:32.000Z
2022-03-05T05:14:34.000Z
src/Server/DatabaseSystem/Tools/delWeeklyData.py
Sniper970119/MemoryAssistInPython
434efd09edbda7919a3f754374add7f34912fab7
[ "MIT" ]
1
2020-02-04T15:20:04.000Z
2020-06-04T05:02:06.000Z
src/Server/DatabaseSystem/Tools/delWeeklyData.py
Sniper970119/MemoryAssistInPython
434efd09edbda7919a3f754374add7f34912fab7
[ "MIT" ]
5
2019-04-20T07:23:25.000Z
2021-03-24T03:13:01.000Z
# -*- coding:utf-8 -*- from src.Server.Conf.config import * class DelWeeklyData(): """ 删除每周数据的数据库 """ def __init__(self): myclient = pymongo.MongoClient("localhost:27017") mydb = myclient["MemoryAssist"] self.weeklyCol = mydb["weekly_user"] pass def handle(self): """ 删除每周数据的数据库 :return: """ # 删除数据库中的所有文档 self.weeklyCol.delete_many({})
19.173913
57
0.548753
39
441
6.051282
0.794872
0.110169
0
0
0
0
0
0
0
0
0
0.019802
0.312925
441
22
58
20.045455
0.759076
0.145125
0
0
0
0
0.115502
0
0
0
0
0
0
1
0.222222
false
0.111111
0.111111
0
0.444444
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
2
e45089ab6b656b0267475a1ee7cd43153334253d
839
py
Python
src/region_set_profiler/utils.py
stephenkraemer/regionset_profiler
f4597f025688982827a6b56a49ad92f23d61897c
[ "BSD-3-Clause" ]
2
2020-05-04T07:51:08.000Z
2022-02-22T15:37:37.000Z
src/region_set_profiler/utils.py
stephenkraemer/regionset_profiler
f4597f025688982827a6b56a49ad92f23d61897c
[ "BSD-3-Clause" ]
1
2020-05-04T07:57:50.000Z
2020-05-04T09:57:49.000Z
src/region_set_profiler/utils.py
stephenkraemer/regionset_profiler
f4597f025688982827a6b56a49ad92f23d61897c
[ "BSD-3-Clause" ]
null
null
null
import pickle import re import pandas as pd def parquet(fp, suffix=".p"): return re.sub(f"{suffix}$", ".parquet", fp) def p(fp, suffix=".tsv"): return re.sub(f"{suffix}$", ".p", fp) def assert_granges_are_sorted(df: pd.DataFrame): """ Args: df: must have columns ['Chromosome', 'Start', 'End'] Raises: AssertionError if df is not sorted on ['Chromosome', 'Start', 'End'] """ assert df["Chromosome"].is_monotonic_increasing assert ( df.groupby("Chromosome")[["Start", "End"]] .agg(lambda ser: ser.is_monotonic_increasing) .all() .all() ) def to_pickle(obj, fp, protocol=4): with open(fp, "wb") as fout: pickle.dump(obj, fout, protocol=protocol) def from_pickle(fp): with open(fp, "rb") as fin: return pickle.load(fin)
19.97619
76
0.595948
113
839
4.345133
0.477876
0.09165
0.10998
0.04888
0.07332
0
0
0
0
0
0
0.001567
0.239571
839
41
77
20.463415
0.768025
0.171633
0
0.095238
0
0
0.099248
0
0
0
0
0
0.142857
1
0.238095
false
0
0.142857
0.095238
0.52381
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
e453a95eae0003338d9a4f9004527b6d63fabdfe
353
py
Python
Python/CeV/Exercicios/ex72.py
WerickL/Learning
5a9a488f0422454e612439b89093d5bc11242e65
[ "MIT" ]
null
null
null
Python/CeV/Exercicios/ex72.py
WerickL/Learning
5a9a488f0422454e612439b89093d5bc11242e65
[ "MIT" ]
null
null
null
Python/CeV/Exercicios/ex72.py
WerickL/Learning
5a9a488f0422454e612439b89093d5bc11242e65
[ "MIT" ]
null
null
null
ext = ('zero', 'um', 'dois', 'três', 'quatro', 'cinco', 'seis', 'sete', 'oito', 'nove', 'dez', 'onze', 'doze', 'treze', 'quatorze', 'quinze', 'dezesseis', 'dezessete', 'dezoito', 'dezenove', 'vinte') val = int(input('Digite um valor:')) while not 0 <= val <= 20: val = int(input('Digite um valor:')) print(f'o valor digitado foi {ext[val]}')
50.428571
111
0.575071
47
353
4.319149
0.765957
0.059113
0.108374
0.167488
0.236453
0.236453
0
0
0
0
0
0.010101
0.15864
353
6
112
58.833333
0.673401
0
0
0.333333
0
0
0.487252
0
0
0
0
0
0
1
0
false
0
0
0
0
0.166667
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e4606069fb9517b60da3a77a0719df1a8865f3d9
346
py
Python
examples/med_sts_clinical.py
anwar1103/semantic-text-similarit
2d6729dd5073fb3e64169969277defe18676dafc
[ "MIT" ]
167
2019-07-28T00:08:36.000Z
2022-03-19T20:59:14.000Z
examples/med_sts_clinical.py
anwar1103/semantic-text-similarit
2d6729dd5073fb3e64169969277defe18676dafc
[ "MIT" ]
20
2019-07-23T07:47:51.000Z
2021-06-08T06:17:14.000Z
examples/med_sts_clinical.py
anwar1103/semantic-text-similarit
2d6729dd5073fb3e64169969277defe18676dafc
[ "MIT" ]
43
2019-07-26T00:06:28.000Z
2021-10-06T00:52:26.000Z
from semantic_text_similarity.models import ClinicalBertSimilarity from scipy.stats import pearsonr model = ClinicalBertSimilarity() predictions = model.predict([("The patient is sick.", "Grass is green."), ("A prescription of acetaminophen 325 mg was given."," The patient was given Tylenol.")]) print(predictions)
43.25
118
0.722543
39
346
6.358974
0.74359
0.080645
0
0
0
0
0
0
0
0
0
0.010638
0.184971
346
8
119
43.25
0.868794
0
0
0
0
0
0.331412
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0.166667
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
e466dc77b4c894a23498f934c7ce9264c4dbf8e9
3,839
py
Python
functions.py
BinaryVoid/indianrailwaybot
4d349a4f2864d3b368d4fdcb10c7595df89641ca
[ "MIT" ]
null
null
null
functions.py
BinaryVoid/indianrailwaybot
4d349a4f2864d3b368d4fdcb10c7595df89641ca
[ "MIT" ]
null
null
null
functions.py
BinaryVoid/indianrailwaybot
4d349a4f2864d3b368d4fdcb10c7595df89641ca
[ "MIT" ]
null
null
null
from urllib2 import urlopen import config import json import telepot #########VARIABLES############## site = 'http://api.railwayapi.com/' bot = telepot.Bot(config.telegram_key) railkey = config.railway_key ##########VARIABLES END######### def on_between_request(msg): invalid_msg = """Invalid command. /between helps you find trains between two stations. The right format is: _/between <source-code> <dest-code> <date>_ *Eg*: /between ndls bct 17-02 Try /help for more""" final_data = [] if len(msg['text'].split(" ")) != 4: bot.sendMessage(msg['chat']['id'], invalid_msg, parse_mode = 'Markdown') return 0 source = msg['text'].split(" ")[1] dest = msg['text'].split(" ")[2] date = msg['text'].split(" ")[3] response = urlopen(site + 'between/source/' + source + '/dest/' + dest + '/date/' + date + '/apikey/' + railkey) data = json.load(response) for i in range(len(data["train"])): dday = data['train'][i]['days'] running_days = "" for j in range(len(dday)): ddays = dday[j] if ddays['runs'] == 'Y': running_days += "*" + ddays['day-code'][0] + "* " if ddays['runs'] == 'N': running_days += ddays['day-code'][0] + " " a = "*" + str(data['train'][i]['no']) + "*. " + data['train'][i]['number'] + " " + data['train'][i]['name'] + " " + data['train'][i]['from']['code'] + "-->" + data['train'][i]['to']['code'] + "\n" b = running_days + "\n" c = "Source departure:*" + data['train'][i]['src_departure_time'] + "* Destination Arrival: *" + data['train'][i]['dest_arrival_time'] + "* Travel Time:*" + data['train'][i]['travel_time'] + "*" + "\n" final_data.append(a + b + c) xxx = "\n".join(final_data) try: bot.sendMessage(telepot.glance(msg)[2], xxx, parse_mode = 'Markdown') except: bot.sendMessage(telepot.glance(msg)[2], "No trains", parse_mode = 'Markdown') def on_pnr_request(msg): invalid_msg = """Invalid command. /pnr helps you check your pnr status. 
The right format is: *Eg*: /pnr 1234567890 Try /help for more """ if len(msg['text'].split(" ")) != 2: bot.sendMessage(msg['chat']['id'], invalid_msg, parse_mode = 'Markdown') return 0 pnr = msg['text'].split(" ")[1] response = urlopen(site + 'pnr_status/pnr/' + pnr + '/apikey/' + railkey) data = json.load(response) if data['response_code'] == 410: bot.sendMessage(msg['chat']['id'], "Flushed PNR or PNR not yet generated", parse_mode = 'Markdown') return 0 if data['response_code'] == 404: bot.sendMessage(msg['chat']['id'], "Source not responding. Please double check your PNR. If it still doesn't work, try after some time.", parse_mode = 'Markdown') return 0 pnr = data['pnr'] train_name = data['train_name'] train_num = data['train_num'] doj = data['doj'] class_name = data['class'] dest_code = data['reservation_upto']['code'] dest_name = data['reservation_upto']['name'] src_code = data['from_station']['code'] src_name = data['from_station']['name'] a1 = "train number: *" + train_num + "*\n" a2 = "train name: *" + train_name + "*\n" a3 = "date: *" + doj + "*\n" a = a1 + a2 + a3 b = "Source Station: *" + src_name + "(" + src_code + ")*" c = "Destination Station: *" + dest_name + "(" + dest_code + ")*" print a print b print c pas = data['passengers'] pass_details = [] for i in range(len(pas)): current_status = pas[i]['current_status'] booking_status = pas[i]['booking_status'] no = pas[i]['no'] pass_data = "*" + str(no) + ". " + booking_status + " " + current_status + "*" pass_details.append(pass_data) xxx = "\n".join(pass_details) d = "\nDetails of all the passengers: \n" final_response = "*" + pnr + "*\n" + a + "\n" + b + "\n" + c + "\n" + d + xxx try: bot.sendMessage(telepot.glance(msg)[2], final_response, parse_mode = 'Markdown') except: bot.sendMessage(telepot.glance(msg)[2], "PNR flushed", parse_mode = 'Markdown')
34.585586
203
0.615004
537
3,839
4.271881
0.268156
0.047079
0.039233
0.036617
0.276373
0.218832
0.126417
0.096774
0.096774
0.096774
0
0.013387
0.163324
3,839
110
204
34.9
0.700809
0.005731
0
0.172043
0
0.010753
0.318495
0
0.010753
0
0
0
0
0
null
null
0.064516
0.043011
null
null
0.032258
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
2
e46963dcdf85f6bd5fe63fdcc99449c80158337f
12,809
py
Python
ims/migrations/0001_initial.py
hisham2k9/IMS-and-CAPA
9f70988a6411c72ab4f0cbc818b84db58a28076f
[ "MIT" ]
null
null
null
ims/migrations/0001_initial.py
hisham2k9/IMS-and-CAPA
9f70988a6411c72ab4f0cbc818b84db58a28076f
[ "MIT" ]
15
2021-03-19T03:43:56.000Z
2022-03-12T00:30:55.000Z
ims/migrations/0001_initial.py
hisham2k9/IMS-and-CAPA
9f70988a6411c72ab4f0cbc818b84db58a28076f
[ "MIT" ]
null
null
null
# Generated by Django 3.0.3 on 2020-03-08 10:56 import datetime from django.conf import settings from django.db import migrations, models import django.db.models.deletion import ims.models import multiselectfield.db.fields class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('accounts', '0001_initial'), ] operations = [ migrations.CreateModel( name='closureimages', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ], ), migrations.CreateModel( name='investigationimages', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ], ), migrations.CreateModel( name='submissionimages', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ], ), migrations.CreateModel( name='transferimages', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ], ), migrations.CreateModel( name='validationimages', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ], ), migrations.CreateModel( name='imsmodel', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('pt_id', models.CharField(max_length=20)), ('pt_name', models.CharField(max_length=100)), ('pt_gender', models.CharField(choices=[('Male', 'Male'), ('Female', 'Female'), ('Other', 'Other')], max_length=50)), ('pt_age', models.IntegerField(blank=True, null=True)), ('datetime_creation', models.DateTimeField(default=datetime.datetime.now)), ('pt_room', models.CharField(max_length=100)), ('pt_department', models.CharField(max_length=100)), ('datetime_occurance', models.DateTimeField(default=datetime.datetime.now)), ('place_occurance', models.CharField(max_length=100)), ('anaesthesia_surgery', 
multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('Complications of Anaesthesia', 'Complications of Anaesthesia'), ('Intubation Related', 'Intubation related'), ('Accidental extubation', 'Accidental Extubation'), ('Wrong patient or wrong site-side', 'Wrong patient or wrong site-side'), ('Acute MI within 48 hours of surgery', 'Acute MI within 48 hours of surgery'), ('Sponge or Instrument or Needle left in situ', 'Sponge or Instrument or Needle left in situ'), ('Retained Foreign body removal', 'Retained foreign body removal'), ('Death in OT or within 48 hours of surgery ', 'Death in OT or within 48 hours of surgery')], max_length=300, null=True)), ('anaesthesia_surgery_others', models.CharField(blank=True, max_length=200, null=True)), ('communication_related', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('Lab speciment improperly prepared/collected', 'Lab specimen improperly prepared'), ('Sample improperly labelled', 'Sample improperly labelled')], max_length=300, null=True)), ('communication_others', models.CharField(blank=True, max_length=200, null=True)), ('consent_related', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('Consent not obtained or documented', 'Consent not obtained or documented'), ('Inadequete consent', 'Inadequete consent')], max_length=300, null=True)), ('consent_related_others', models.CharField(blank=True, max_length=200, null=True)), ('emergency_related', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('Patient left against medical advice', 'Patient left against medical advice'), ('Reports and patients documents misplaced', 'Reports and patients documents misplaced')], max_length=300, null=True)), ('emergency_related_others', models.CharField(blank=True, max_length=200, null=True)), ('safety_falls_related', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('Patient fall', 'Patient fall'), ('Self Inflicted injury', 'Self Inflicted injury'), ('Visitor Falls 
or Injury', 'Visitor Falls or Injury'), ('Assault on patient or Staff', 'Assault on patient or Staff'), ('Needle stick Injury', 'Needle stick Injury'), ('Hazardous Material Spillage', 'Hazardous Material Spillage')], max_length=300, null=True)), ('safety_falls_related_others', models.CharField(blank=True, max_length=200, null=True)), ('diagnosis_related', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('Management of care', 'Management of care'), ('Delay in starting treatment', 'Delay in starting treatment'), ('Patient not seen by Doctor', 'Patient not seen by Doctor'), ('Repeat blood sample withdrawal', 'Repeat blood sample withdrawal')], max_length=300, null=True)), ('diagnosis_related_others', models.CharField(blank=True, max_length=200, null=True)), ('transfusion_related', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('Variance in use of blood and blood products', 'Variance in use of blood and blood products'), ('Transfusion stopped due to reaction', 'Transfusion stopped due to reaction')], max_length=300, null=True)), ('transfusion_related_others', models.CharField(blank=True, max_length=200, null=True)), ('equipment_related', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('Equipment not available', 'Equipment not available'), ('Equipment malfunctioned', 'Equipment malfunctioned')], max_length=300, null=True)), ('equipment_related_others', models.CharField(blank=True, max_length=200, null=True)), ('medication_variance', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('Prescription error', 'Prescription error'), ('Indenting error', 'Indenting error'), ('Dispensing error', 'Dispensing error'), ('Administration error (Violation of any of the rights)', 'Administration error (Violation of any of the rights)'), ('Drug given to patient with known allergy.', 'Drug given to patient with known allergy.')], max_length=300, null=True)), ('medication_variance_others', models.CharField(blank=True, 
max_length=200, null=True)), ('misc', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('Security related', 'Security related'), ('Theft of personal property', 'Theft of personal property'), ('Infrastructure failure or collapse', 'Infrastrucure failure or collapse')], max_length=300, null=True)), ('misc_others', models.CharField(blank=True, max_length=200, null=True)), ('management_of_care', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('Non availability of doctor on call', 'Non availability of doctor on call')], max_length=300, null=True)), ('management_of_care_others', models.CharField(blank=True, max_length=200, null=True)), ('contributing_factors', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('Language Barrier', 'Language Barrier'), ('Hearing problems', 'Hearing problems'), ('Limited Vision', 'Limited Vision'), ('Obesity', 'Obesity'), ('Seizures', 'Seizures'), ('Intoxication', 'Intoxication'), ('Physical handicaps', 'Physical handicaps')], max_length=300, null=True)), ('contributing_factors_others', models.CharField(blank=True, max_length=200, null=True)), ('pt_condition', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('Well Oriented', 'Well Oriented'), ('Confused', 'Confused'), ('Sedated', 'Sedated'), ('Drowsy', 'Drowsy'), ('Hyperactive', 'Hyperactive'), ('Uncooperative', 'Uncooperative'), ('Violent', 'Violent')], max_length=300, null=True)), ('pt_condition_others', models.CharField(blank=True, max_length=200, null=True)), ('action_taken', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('Informed Consultant or HOD', 'Informed Consultant or HOD'), ('Event recorded in the file', 'Event recorded in the file'), ('Seen by attending physician', 'Seen by attending physician')], max_length=300, null=True)), ('action_taken_others', models.CharField(blank=True, max_length=200, null=True)), ('severity_score', multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('No 
harm or No treatment', 'No harm or No treatment'), ('Insignificant harm or minimal treatment', 'Insignificant harm or minimal treatment'), ('Significant Physical intervention/Residual effect possible', 'Significant Physical intervention/Residual effect possible'), ('Major or extensive intervention/Residual effect', 'Major or extensive intervention/Residual effect'), ('Major potentially life threatening disability or residual effects', 'Major potentially life threatening disability or residual effects'), ('Death Imminent or predictable', 'Death Imminent or predictable'), ('Resultant Death', 'Resultant Death')], max_length=300, null=True)), ('severity_score_others', models.CharField(blank=True, max_length=200, null=True)), ('narration', models.TextField(blank=True, null=True)), ('relevant_info', models.TextField(blank=True, null=True)), ('report_to', models.CharField(max_length=100)), ('submission_update_timestamp', models.DateTimeField(default=datetime.datetime.now)), ('submit_confirm_switch', models.BooleanField(default=False)), ('transfer_comments_qa', models.TextField(blank=True, null=True)), ('transfer_comments_qa_by', models.CharField(max_length=100)), ('transfer_qa_comments_update_timestamp', models.DateTimeField(default=datetime.datetime.now)), ('transfer_qa_comments_confirm_switch', models.BooleanField(default=False)), ('investigation_comments_hod', models.TextField(blank=True, null=True)), ('investigation_by', models.CharField(blank=True, max_length=100, null=True)), ('investigation_update_timestamp', models.DateTimeField(default=datetime.datetime.now)), ('investigation_confirm_switch', models.BooleanField(default=False)), ('validation_comments_qa', models.TextField(blank=True, null=True)), ('validation_qa_by', models.CharField(max_length=100)), ('validation_confirm_update_timestamp', models.DateTimeField(default=datetime.datetime.now)), ('validation_confirm_switch', models.BooleanField(default=False)), ('comments_cqo', models.TextField(blank=True, 
null=True)), ('closure_by', models.CharField(blank=True, max_length=100, null=True)), ('closure_update_timestamp', models.DateTimeField(default=datetime.datetime.now)), ('closure_confirm_switch', models.BooleanField(default=False)), ('closure_confirm_user', models.ForeignKey(default=6, on_delete=models.SET(ims.models.get_sentinel_user), related_name='closure_person', to=settings.AUTH_USER_MODEL)), ('investigation_user', models.ForeignKey(default=6, on_delete=models.SET(ims.models.get_sentinel_user), related_name='investigator', to=settings.AUTH_USER_MODEL)), ('pt_doctor', models.ForeignKey(default='Unknown', on_delete=django.db.models.deletion.SET_DEFAULT, to='accounts.Doctors')), ('qa_transfer_to', models.ForeignKey(default=6, on_delete=models.SET(ims.models.get_sentinel_user), related_name='transfer_to_user', to=settings.AUTH_USER_MODEL)), ('submission_confirm_user', models.ForeignKey(default=6, on_delete=models.SET(ims.models.get_sentinel_user), to=settings.AUTH_USER_MODEL)), ('transfer_qa_comments_user', models.ForeignKey(default=6, on_delete=models.SET(ims.models.get_sentinel_user), related_name='transfer_user', to=settings.AUTH_USER_MODEL)), ('validation_confirm_user', models.ForeignKey(default=6, on_delete=models.SET(ims.models.get_sentinel_user), related_name='validator', to=settings.AUTH_USER_MODEL)), ], ), ]
102.472
751
0.684753
1,425
12,809
6.005614
0.195088
0.043118
0.039729
0.047675
0.675976
0.563566
0.462491
0.378359
0.256018
0.231947
0
0.01465
0.179327
12,809
124
752
103.298387
0.799467
0.003513
0
0.25641
1
0
0.363031
0.062921
0
0
0
0
0
1
0
false
0
0.051282
0
0.08547
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e482124563740f50db07ef319819174a176c24ca
2,036
py
Python
trial.py
pabloalarconm/EMB
440f491f4b4023e28e6336da34e1472d8f017fec
[ "MIT" ]
null
null
null
trial.py
pabloalarconm/EMB
440f491f4b4023e28e6336da34e1472d8f017fec
[ "MIT" ]
null
null
null
trial.py
pabloalarconm/EMB
440f491f4b4023e28e6336da34e1472d8f017fec
[ "MIT" ]
null
null
null
from embuilder.builder import EMB prefixes = dict( rdf = "http://www.w3.org/1999/02/22-rdf-syntax-ns#" , rdfs = "http://www.w3.org/2000/01/rdf-schema#" , obo = "http://purl.obolibrary.org/obo/" , sio = "http://semanticscience.org/resource/" , xsd = "http://www.w3.org/2001/XMLSchema#", this = "http://my_example.com/") triplets = [ # sio nodes ["this:$(pid)_$(uniqid)_ID","sio:denotes","this:$(pid)_$(uniqid)_Role","iri"], ["this:$(pid)_$(uniqid)_Entity","sio:has-role","this:$(pid)_$(uniqid)_Role","iri"], ["this:$(pid)_$(uniqid)_Role","sio:is-realized-in","this:$(pid)_$(uniqid)_Process","iri"], ["this:$(pid)_$(uniqid)_Process","sio:has-output","this:$(pid)_$(uniqid)_Output","iri"], ["this:$(pid)_$(uniqid)_Output","sio:refers-to","this:$(pid)_$(uniqid)_Attribute","iri"], ["this:$(pid)_$(uniqid)_Entity","sio:has-attribute","this:$(pid)_$(uniqid)_Attribute","iri"], # sio types ["this:$(pid)_$(uniqid)_ID","rdf:type","sio:identifier","iri"], ["this:$(pid)_$(uniqid)_Entity","rdf:type","sio:person","iri"], ["this:$(pid)_$(uniqid)_Role","rdf:type","sio:role","iri"], ["this:$(pid)_$(uniqid)_Process","rdf:type","sio:process","iri"], ["this:$(pid)_$(uniqid)_Output","rdf:type","sio:information-content-entity","iri"], ["this:$(pid)_$(uniqid)_Attribute","rdf:type","sio:attribute","iri"], # data ["this:$(pid)_$(uniqid)_Output","sio:has-value","$(datetime)","xsd:date"]] config = dict( source_name = "source_cde_test", configuration = "ejp", # Two options for this parameter: # ejp: it defines CDE-in-a-Box references, being compatible with this workflow # csv: No workflow defined, set the source configuration for been used by CSV as data source csv_name = "source_1" # parameter only needed in case you pick "csv" as configuration ) build = EMB(config, prefixes,triplets) test = build.transform_ShEx("this") test2 = build.transform_YARRRML() test3 = build.transform_OBDA() print(test) print(test2) print(test3)
38.415094
120
0.642927
275
2,036
4.589091
0.381818
0.105388
0.195721
0.126783
0.263074
0.083994
0.083994
0.052298
0
0
0
0.014533
0.121316
2,036
53
121
38.415094
0.690889
0.141454
0
0
0
0
0.601379
0.320505
0
0
0
0
0
1
0
false
0
0.029412
0
0.029412
0.088235
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e4949bc47df0e2b29020d348b00a342ea80b91ab
607
py
Python
whoscored/parser.py
saromanov/whoscored-parser
a7fc7aa329b375d924c46b792409b162f8b5c638
[ "MIT" ]
null
null
null
whoscored/parser.py
saromanov/whoscored-parser
a7fc7aa329b375d924c46b792409b162f8b5c638
[ "MIT" ]
null
null
null
whoscored/parser.py
saromanov/whoscored-parser
a7fc7aa329b375d924c46b792409b162f8b5c638
[ "MIT" ]
3
2020-05-21T22:12:12.000Z
2022-03-21T12:48:11.000Z
from bs4 import BeatifulSoup from request import get_page class Parse: def __init__(self, url): self._url = url def parse(self): ''' Base method for parsing ''' page = get_page(self._url) soup = BeautifulSoup(page, 'html.parser') class ParseGame(Parse): def __init__(self, url): super().__init__(url) def __str__(self): ''' Return name of the game https://1xbet.whoscored.com/Matches/1376071/Live/England-Premier-League-2019-2020-Crystal-Palace-Bournemouth ''' return self._name
25.291667
116
0.601318
71
607
4.84507
0.619718
0.081395
0.069767
0.093023
0.110465
0
0
0
0
0
0
0.039535
0.291598
607
24
117
25.291667
0.760465
0.257002
0
0.153846
0
0
0.027778
0
0
0
0
0
0
1
0.307692
false
0
0.153846
0
0.692308
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
e49649cc2c1c1af2ee3885b3b722ce667fc34468
9,463
py
Python
tests/test_backend.py
bwind/pymogilefs
a3013f7ce3ec53ec1db1bd8b80041cb04126b6ea
[ "MIT" ]
3
2017-04-15T15:43:44.000Z
2021-03-08T22:16:28.000Z
tests/test_backend.py
bwind/pymogilefs
a3013f7ce3ec53ec1db1bd8b80041cb04126b6ea
[ "MIT" ]
8
2017-04-15T15:47:17.000Z
2018-10-01T14:12:29.000Z
tests/test_backend.py
bwind/pymogilefs
a3013f7ce3ec53ec1db1bd8b80041cb04126b6ea
[ "MIT" ]
3
2017-04-15T15:44:56.000Z
2018-09-14T06:09:26.000Z
from pymogilefs.backend import ( Backend, GetHostsConfig, CreateHostConfig, UpdateHostConfig, DeleteHostConfig, GetDomainsConfig, CreateDomainConfig, DeleteDomainConfig, CreateClassConfig, DeleteClassConfig, GetDevicesConfig, CreateDeviceConfig, SetStateConfig, SetWeightConfig, ) from pymogilefs.response import Response try: from unittest.mock import patch except ImportError: from mock import patch from unittest import TestCase class HostTestCase(TestCase): def test_get_hosts(self): return_value = Response('OK host6_hostip=10.0.0.25&host6_http_port=75' '00&host8_hostname=\r\n', GetHostsConfig) with patch.object(Backend, 'do_request', return_value=return_value): hosts = Backend([]).get_hosts().data expected = [{'hostip': '10.0.0.25', 'http_port': '7500'}, {'hostname': ''}] self.assertIn(expected[0], hosts['hosts'].values()) self.assertIn(expected[1], hosts['hosts'].values()) def test_get_hosts_with_hosts(self): return_value = Response('OK host6_hostip=10.0.0.25&host6_http_port=75' '00&host8_hostname=&hosts=10\r\n', GetHostsConfig) with patch.object(Backend, 'do_request', return_value=return_value): hosts = Backend([]).get_hosts().data expected = [{'hostip': '10.0.0.25', 'http_port': '7500'}, {'hostname': ''}] self.assertIn(expected[0], hosts['hosts'].values()) self.assertIn(expected[1], hosts['hosts'].values()) def test_create_host(self): return_value = Response('OK hostid=4&hostname=localhost\r\n', CreateHostConfig) with patch.object(Backend, 'do_request', return_value=return_value): response = Backend([]).create_host(host='localhost', ip='0.0.0.0', port=7001).data expected = {'id': '4', 'name': 'localhost'} self.assertEqual(response, expected) def test_update_host(self): return_value = Response('OK hostid=7&hostname=hostname\r\n', UpdateHostConfig) with patch.object(Backend, 'do_request', return_value=return_value): response = Backend([]).update_host(host='localhost', ip='0.0.0.0', port=7001).data expected = {'id': '7', 'name': 'hostname'} 
self.assertEqual(response, expected) def test_delete_host(self): return_value = Response('OK \r\n', DeleteHostConfig) with patch.object(Backend, 'do_request', return_value=return_value): response = Backend([]).delete_host(host='localhost').data self.assertEqual(response, {}) class DomainTestCase(TestCase): def test_get_domains(self): return_value = Response('OK domain15class1name=default&domain25class1' 'name=default&domain41class1mindevcount=2\r\n', GetDomainsConfig) with patch.object(Backend, 'do_request', return_value=return_value): domains = Backend([]).get_domains().data['domains'] self.assertEqual(domains[15]['classes'][1]['name'], 'default') self.assertEqual(domains[25]['classes'][1]['name'], 'default') self.assertEqual(domains[41]['classes'][1]['mindevcount'], '2') def test_get_domains_with_domains(self): return_value = Response('OK domain15class1name=default&domain25class1' 'name=default&domain41class1mindevcount=2&dom' 'ains=70\r\n', GetDomainsConfig) with patch.object(Backend, 'do_request', return_value=return_value): domains = Backend([]).get_domains().data['domains'] self.assertEqual(domains[15]['classes'][1]['name'], 'default') self.assertEqual(domains[25]['classes'][1]['name'], 'default') self.assertEqual(domains[41]['classes'][1]['mindevcount'], '2') def test_create_domain(self): return_value = Response('OK domain=testdomain\r\n', CreateDomainConfig) with patch.object(Backend, 'do_request', return_value=return_value): domains = Backend([]).create_domain('testdomain').data expected = {'domain': 'testdomain'} self.assertEqual(domains, expected) def test_delete_domain(self): return_value = Response('OK domain=testdomain\r\n', DeleteDomainConfig) with patch.object(Backend, 'do_request', return_value=return_value): domains = Backend([]).delete_domain('testdomain').data expected = {'domain': 'testdomain'} self.assertEqual(domains, expected) class ClassTestCase(TestCase): def test_create_class(self): return_value = Response('OK 
mindevcount=2&domain=testdomain&class=tes' 'tclass\r\n', CreateClassConfig) with patch.object(Backend, 'do_request', return_value=return_value): classes = Backend([]).create_class(domain='testdomain', _class='testclass', mindevcount=2).data expected = {'mindevcount': '2', 'domain': 'testdomain', 'class': 'testclass'} self.assertEqual(expected, classes) def test_update_class(self): return_value = Response('OK mindevcount=3&domain=testdomain&class=tes' 'tclass\r\n', CreateClassConfig) with patch.object(Backend, 'do_request', return_value=return_value): classes = Backend([]).update_class(domain='testdomain', _class='testclass', mindevcount=3).data expected = {'mindevcount': '3', 'domain': 'testdomain', 'class': 'testclass'} self.assertEqual(expected, classes) def test_delete_class(self): return_value = Response('OK domain=testdomain&class=testclass\r\n', DeleteClassConfig) with patch.object(Backend, 'do_request', return_value=return_value): classes = Backend([]).delete_class('testdomain', 'testclass').data expected = {'domain': 'testdomain', 'class': 'testclass'} self.assertEqual(classes, expected) class DeviceTestCase(TestCase): def test_get_devices(self): return_value = Response('OK dev27_mb_asof=&dev27_mb_total=1870562&dev' '26_mb_used=76672\r\n', GetDevicesConfig) with patch.object(Backend, 'do_request', return_value=return_value): devices = Backend([]).get_devices().data expected = [{'mb_asof': '', 'mb_total': '1870562'}, {'mb_used': '76672'}] self.assertIn(expected[0], devices['devices'].values()) self.assertIn(expected[1], devices['devices'].values()) def test_get_devices_with_devices(self): return_value = Response('OK dev27_mb_asof=&dev27_mb_total=1870562&dev' '26_mb_used=76672&devices=6\r\n', GetDevicesConfig) with patch.object(Backend, 'do_request', return_value=return_value): devices = Backend([]).get_devices().data expected = [{'mb_asof': '', 'mb_total': '1870562'}, {'mb_used': '76672'}] self.assertIn(expected[0], devices['devices'].values()) 
self.assertIn(expected[1], devices['devices'].values()) def test_create_device(self): return_value = Response('OK \r\n', CreateDeviceConfig) with patch.object(Backend, 'do_request', return_value=return_value): response = Backend([]).create_device(hostname='testhost10', devid=6, hostip='0.0.0.0', state='alive').data self.assertEqual(response, {}) class SetStateTestCase(TestCase): def test_set_state(self): return_value = Response('OK \r\n', SetStateConfig) with patch.object(Backend, 'do_request', return_value=return_value): response = Backend([]).set_state(host='localhost', device=7, state='down').data self.assertEqual(response, {}) class SetWeightTestCase(TestCase): def test_set_weight(self): return_value = Response('OK \r\n', SetWeightConfig) with patch.object(Backend, 'do_request', return_value=return_value): response = Backend([]).set_weight(host='testhost10', device=6, weight=8).data self.assertEqual(response, {})
47.552764
79
0.555321
901
9,463
5.660377
0.127636
0.11
0.085686
0.076667
0.758431
0.73451
0.69098
0.631961
0.631961
0.631961
0
0.030657
0.320934
9,463
198
80
47.792929
0.762994
0
0
0.474286
0
0
0.16686
0.067526
0
0
0
0
0.142857
1
0.097143
false
0
0.034286
0
0.165714
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e4ab831e2c3b79c4cd052a90b3662c77b170c390
9,122
py
Python
dendropy/__init__.py
EnjoyLifeFund/macHighSierra-py36-pkgs
5668b5785296b314ea1321057420bcd077dba9ea
[ "BSD-3-Clause", "BSD-2-Clause", "MIT" ]
null
null
null
dendropy/__init__.py
EnjoyLifeFund/macHighSierra-py36-pkgs
5668b5785296b314ea1321057420bcd077dba9ea
[ "BSD-3-Clause", "BSD-2-Clause", "MIT" ]
null
null
null
dendropy/__init__.py
EnjoyLifeFund/macHighSierra-py36-pkgs
5668b5785296b314ea1321057420bcd077dba9ea
[ "BSD-3-Clause", "BSD-2-Clause", "MIT" ]
null
null
null
#! /usr/bin/env python ############################################################################## ## DendroPy Phylogenetic Computing Library. ## ## Copyright 2010-2015 Jeet Sukumaran and Mark T. Holder. ## All rights reserved. ## ## See "LICENSE.rst" for terms and conditions of usage. ## ## If you use this work or any portion thereof in published work, ## please cite it as: ## ## Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library ## for phylogenetic computing. Bioinformatics 26: 1569-1571. ## ############################################################################## import sys ############################################################################### ## Populate the 'dendropy' namespace from dendropy.dataio.nexusprocessing import get_rooting_argument from dendropy.datamodel.basemodel import Annotation from dendropy.datamodel.basemodel import AnnotationSet from dendropy.datamodel.taxonmodel import Taxon from dendropy.datamodel.taxonmodel import TaxonNamespace from dendropy.datamodel.taxonmodel import TaxonNamespacePartition from dendropy.datamodel.taxonmodel import TaxonNamespaceMapping from dendropy.datamodel.taxonmodel import TaxonSet # Legacy from dendropy.datamodel.treemodel import Bipartition from dendropy.datamodel.treemodel import Edge from dendropy.datamodel.treemodel import Node from dendropy.datamodel.treemodel import Tree from dendropy.datamodel.treecollectionmodel import TreeList from dendropy.datamodel.treecollectionmodel import SplitDistribution from dendropy.datamodel.treecollectionmodel import TreeArray from dendropy.datamodel.charstatemodel import StateAlphabet from dendropy.datamodel.charstatemodel import DNA_STATE_ALPHABET from dendropy.datamodel.charstatemodel import RNA_STATE_ALPHABET from dendropy.datamodel.charstatemodel import NUCLEOTIDE_STATE_ALPHABET from dendropy.datamodel.charstatemodel import PROTEIN_STATE_ALPHABET from dendropy.datamodel.charstatemodel import BINARY_STATE_ALPHABET from 
dendropy.datamodel.charstatemodel import RESTRICTION_SITES_STATE_ALPHABET from dendropy.datamodel.charstatemodel import INFINITE_SITES_STATE_ALPHABET from dendropy.datamodel.charstatemodel import new_standard_state_alphabet from dendropy.datamodel.charmatrixmodel import CharacterDataSequence from dendropy.datamodel.charmatrixmodel import CharacterMatrix from dendropy.datamodel.charmatrixmodel import DnaCharacterDataSequence from dendropy.datamodel.charmatrixmodel import DnaCharacterMatrix from dendropy.datamodel.charmatrixmodel import NucleotideCharacterDataSequence from dendropy.datamodel.charmatrixmodel import NucleotideCharacterMatrix from dendropy.datamodel.charmatrixmodel import RnaCharacterDataSequence from dendropy.datamodel.charmatrixmodel import RnaCharacterMatrix from dendropy.datamodel.charmatrixmodel import ProteinCharacterDataSequence from dendropy.datamodel.charmatrixmodel import ProteinCharacterMatrix from dendropy.datamodel.charmatrixmodel import RestrictionSitesCharacterDataSequence from dendropy.datamodel.charmatrixmodel import RestrictionSitesCharacterMatrix from dendropy.datamodel.charmatrixmodel import InfiniteSitesCharacterDataSequence from dendropy.datamodel.charmatrixmodel import InfiniteSitesCharacterMatrix from dendropy.datamodel.charmatrixmodel import StandardCharacterDataSequence from dendropy.datamodel.charmatrixmodel import StandardCharacterMatrix from dendropy.datamodel.charmatrixmodel import ContinuousCharacterDataSequence from dendropy.datamodel.charmatrixmodel import ContinuousCharacterMatrix from dendropy.calculate.phylogeneticdistance import PhylogeneticDistanceMatrix from dendropy.datamodel.datasetmodel import DataSet from dendropy.utility.error import ImmutableTaxonNamespaceError from dendropy.utility.error import DataParseError from dendropy.utility.error import UnsupportedSchemaError from dendropy.utility.error import UnspecifiedSchemaError from dendropy.utility.error import UnspecifiedSourceError from dendropy.utility.error 
import TooManyArgumentsError from dendropy.utility.error import InvalidArgumentValueError from dendropy.utility.error import MultipleInitializationSourceError from dendropy.utility.error import TaxonNamespaceIdentityError from dendropy.utility.error import TaxonNamespaceReconstructionError from dendropy.utility.error import UltrametricityError from dendropy.utility.error import TreeSimTotalExtinctionException from dendropy.utility.error import SeedNodeDeletionException from dendropy.utility import deprecate ############################################################################### ## Legacy Support from dendropy.legacy import coalescent from dendropy.legacy import continuous from dendropy.legacy import treecalc from dendropy.legacy import popgensim from dendropy.legacy import popgenstat from dendropy.legacy import reconcile from dendropy.legacy import seqmodel from dendropy.legacy import seqsim from dendropy.legacy import treecalc from dendropy.legacy import treemanip from dendropy.legacy import treesim from dendropy.legacy import treesplit from dendropy.legacy import treesum ############################################################################### ## PACKAGE METADATA import collections version_info = collections.namedtuple("dendropy_version_info", ["major", "minor", "micro", "releaselevel"])( major=4, minor=3, micro=0, releaselevel="" ) __project__ = "DendroPy" __version__ = ".".join(str(s) for s in version_info[:4] if s != "") __author__ = "Jeet Sukumaran and Mark T. Holder" __copyright__ = "Copyright 2010-2015 Jeet Sukumaran and Mark T. Holder." __citation__ = "Sukumaran, J and MT Holder. 2010. DendroPy: a Python library for phylogenetic computing. Bioinformatics 26: 1569-1571." 
PACKAGE_VERSION = __version__ # for backwards compatibility (with sate) def _get_revision_object(): from dendropy.utility import vcsinfo __revision__ = vcsinfo.Revision(repo_path=homedir()) return __revision__ def revision_description(): __revision__ = _get_revision_object() if __revision__.is_available: revision_text = " ({})".format(__revision__) else: revision_text = "" return revision_text def name(): return "{} {}{}".format(__project__, __version__, revision_description()) def homedir(): import os try: try: __homedir__ = __path__[0] except AttributeError: __homedir__ = os.path.dirname(os.path.abspath(__file__)) except IndexError: __homedir__ = os.path.dirname(os.path.abspath(__file__)) except OSError: __homedir__ = None except: __homedir__ = None return __homedir__ def description(dest=None): import sys import site if dest is None: dest = sys.stdout fields = collections.OrderedDict() fields["DendroPy version"] = name() fields["DendroPy location"] = homedir() fields["Python version"] = sys.version.replace("\n", "") fields["Python executable"] = sys.executable try: fields["Python site packages"] = site.getsitepackages() except: pass max_fieldname_len = max(len(fieldname) for fieldname in fields) for fieldname, fieldvalue in fields.items(): dest.write("{fieldname:{fieldnamewidth}}: {fieldvalue}\n".format( fieldname=fieldname, fieldnamewidth=max_fieldname_len + 2, fieldvalue=fieldvalue)) def description_text(): from dendropy.utility.textprocessing import StringIO s = StringIO() description(s) return s.getvalue() def citation_info(include_preamble=True, width=76): import textwrap citation_lines = [] if include_preamble: citation_preamble =( "If any stage of your work or analyses relies" " on code or programs from this library, either" " directly or indirectly (e.g., through usage of" " your own or third-party programs, pipelines, or" " toolkits which use, rely on, incorporate, or are" " otherwise primarily derivative of code/programs" " in this library), please 
cite:" ) citation_lines.extend(textwrap.wrap(citation_preamble, width=width)) citation_lines.append("") citation = textwrap.wrap( __citation__, width=width, initial_indent=" ", subsequent_indent=" ", ) citation_lines.extend(citation) return citation_lines def tree_source_iter(*args, **kwargs): s = "No longer supported in DendroPy 4: Instead of 'tree_source_iter()', use 'Tree.yield_from_files()' instead" raise NotImplementedError(s) def multi_tree_source_iter(*args, **kwargs): s = "No longer supported in DendroPy 4: Instead of 'multi_tree_source_iter()', use 'Tree.yield_from_files()' instead" raise NotImplementedError(s) if __name__ == "__main__": description(sys.stdout)
42.826291
135
0.733173
923
9,122
7.061755
0.294691
0.134397
0.135318
0.099417
0.444768
0.171678
0.167536
0.126112
0.108009
0.06413
0
0.007051
0.160491
9,122
212
136
43.028302
0.844085
0.057334
0
0.088757
0
0.005917
0.11594
0.018119
0
0
0
0
0
1
0.053254
false
0.005917
0.467456
0.005917
0.556213
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
e4aeafa2659ca208d853bbc89b82cc763cd103af
606
py
Python
deeprank/utils/add_binaryClass.py
DeepRank/DeepRank_VariantPred
42cd85c7a521463ab2b644ef8da87c107a2b5bce
[ "Apache-2.0" ]
84
2018-01-18T02:49:10.000Z
2022-03-05T00:47:49.000Z
deeprank/utils/add_binaryClass.py
DeepRank/DeepRank_VariantPred
42cd85c7a521463ab2b644ef8da87c107a2b5bce
[ "Apache-2.0" ]
228
2018-03-01T15:05:17.000Z
2022-03-25T13:00:36.000Z
deeprank/utils/add_binaryClass.py
DeepRank/DeepRank_VariantPred
42cd85c7a521463ab2b644ef8da87c107a2b5bce
[ "Apache-2.0" ]
25
2018-03-01T14:56:28.000Z
2022-01-20T07:32:35.000Z
#!/usr/bin/env python # This script can be used to create/correct target values import glob import os from time import time import numpy as np import deeprank.generate.DataGenerator as DataGenerator path = './' database = [f for f in glob.glob(path + '*.hdf5')] print(database) # create binary target for hdf5_FL in database: print("Add binary class to %s" % hdf5_FL) data_set = DataGenerator( compute_targets=['deeprank.targets.binary_class'], hdf5=hdf5_FL) t0 = time() data_set.add_target(prog_bar=True) print(' ' * 25 + '--> Done in %f s.' % (time() - t0))
20.896552
58
0.673267
89
606
4.483146
0.52809
0.045113
0
0
0
0
0
0
0
0
0
0.018711
0.206271
606
28
59
21.642857
0.810811
0.160066
0
0
1
0
0.15415
0.057312
0
0
0
0
0
1
0
false
0
0.3125
0
0.3125
0.1875
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
e4b72c962ebf39ae5a6b73f84eb0bd0b3752ebc5
1,152
py
Python
cride/users/views/exchanges.py
albertoaldanar/betmatcherAPI
c0590025efd79f4e489f9c9433b17554ea6ba23f
[ "MIT" ]
null
null
null
cride/users/views/exchanges.py
albertoaldanar/betmatcherAPI
c0590025efd79f4e489f9c9433b17554ea6ba23f
[ "MIT" ]
7
2020-06-05T20:53:27.000Z
2022-03-11T23:47:12.000Z
cride/users/views/exchanges.py
albertoaldanar/betmatcherAPI
c0590025efd79f4e489f9c9433b17554ea6ba23f
[ "MIT" ]
null
null
null
"""Events API views""" #DRF from rest_framework.views import APIView from rest_framework import status, mixins, viewsets from rest_framework.response import Response from rest_framework.request import Request from rest_framework.decorators import api_view from rest_framework import generics from operator import itemgetter, attrgetter #Django from django.db.models import Q #Serializer from cride.users.serializers import PrizeModelSerializer, ExchangeModelSerializer #Utilities from heapq import nlargest #Models from cride.users.models import Prize, User, Exchange @api_view(["POST"]) def shipment_exchange(request): exchange = Exchange.objects.get(id = request.data["exchange"]) exchange.phone = request.data["phone"] exchange.email = request.data["email"] exchange.adress = request.data["adress"] exchange.country = request.data["country"] exchange.state = request.data["state"] exchange.city = request.data["city"] exchange.cp = request.data["cp"] exchange.full_name = request.data["full_name"] exchange.save() data = {"Exchange": ExchangeModelSerializer(exchange).data} return Response(data)
31.135135
81
0.767361
142
1,152
6.147887
0.380282
0.113402
0.116838
0.052692
0
0
0
0
0
0
0
0
0.130208
1,152
37
82
31.135135
0.871257
0.044271
0
0
0
0
0.057692
0
0
0
0
0
0
1
0.04
false
0
0.44
0
0.52
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
e4b9717c1041a7be4f9a9dc795c80e278462296f
409
py
Python
course_contents/flask_1/setup.py
officinaMusci/python_flask_course
b5f79037105d16998b337da485b879c578b7eea0
[ "MIT" ]
null
null
null
course_contents/flask_1/setup.py
officinaMusci/python_flask_course
b5f79037105d16998b337da485b879c578b7eea0
[ "MIT" ]
null
null
null
course_contents/flask_1/setup.py
officinaMusci/python_flask_course
b5f79037105d16998b337da485b879c578b7eea0
[ "MIT" ]
null
null
null
'''Fichier "app.py" à la racine du projet''' # 1. On importe le module Flask import flask # 2. On déclare la variable globale pour l'application app = flask.Flask(__name__) # 3. On lance le serveur en mode dev pour avoir le debug >>> export FLASK_ENV=development >>> flask run # 3.b Si on utilise un fichier différent (hello.py) : >>> export FLASK_ENV=development >>> export FLASK_APP=hello >>> flask run
24.058824
56
0.721271
67
409
4.298507
0.61194
0.114583
0.097222
0.173611
0
0
0
0
0
0
0
0.011799
0.171149
409
16
57
25.5625
0.837758
0.462103
0
0.571429
0
0
0
0
0
0
0
0
0
0
null
null
0
0.142857
null
null
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
e4bd6b7a6f11b7880e70a6ab5e0926e4d0b29594
1,272
py
Python
hcli_core/hcli_core.py
cometaj2/hcli_core
5363cb81843ddec41246e7fcfac7dfcce8bf4b8c
[ "MIT" ]
null
null
null
hcli_core/hcli_core.py
cometaj2/hcli_core
5363cb81843ddec41246e7fcfac7dfcce8bf4b8c
[ "MIT" ]
null
null
null
hcli_core/hcli_core.py
cometaj2/hcli_core
5363cb81843ddec41246e7fcfac7dfcce8bf4b8c
[ "MIT" ]
null
null
null
from __future__ import absolute_import, division, print_function import falcon import json import template from hcli import api from hcli import home from hcli import document from hcli import command from hcli import option from hcli import execution from hcli import finalexecution from hcli import parameter import config def connector(plugin_path=None): # We load the HCLI template in memory to reduce disk io config.set_plugin_path(plugin_path) config.parse_template(template.Template()) # We setup the HCLI Connector server = falcon.API() server.add_route(home.HomeController.route, api.HomeApi()) server.add_route(document.DocumentController.route, api.DocumentApi()) server.add_route(command.CommandController.route, api.CommandApi()) server.add_route(option.OptionController.route, api.OptionApi()) server.add_route(execution.ExecutionController.route, api.ExecutionApi()) server.add_route(finalexecution.FinalGetExecutionController.route, api.FinalExecutionApi()) server.add_route(finalexecution.FinalPostExecutionController.route, api.FinalExecutionApi()) server.add_route(parameter.ParameterController.route, api.ParameterApi()) return server
37.411765
100
0.764937
149
1,272
6.402685
0.375839
0.067086
0.1174
0.0587
0.081761
0.081761
0
0
0
0
0
0
0.16195
1,272
33
101
38.545455
0.894934
0.063679
0
0
0
0
0
0
0
0
0
0
0
1
0.038462
false
0
0.5
0
0.576923
0.038462
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
e4bd7d047f8ebfe0abcfe4a5ad4e65afc75d55fe
2,098
py
Python
configurator.py
Junyingli510/Linkedin-Scraper
f9f355b9a59b775efe7825bcd97c554cc4676173
[ "MIT" ]
1
2021-05-08T01:24:16.000Z
2021-05-08T01:24:16.000Z
configurator.py
Junyingli510/Linkedin-Scraper
f9f355b9a59b775efe7825bcd97c554cc4676173
[ "MIT" ]
null
null
null
configurator.py
Junyingli510/Linkedin-Scraper
f9f355b9a59b775efe7825bcd97c554cc4676173
[ "MIT" ]
1
2021-05-08T01:28:02.000Z
2021-05-08T01:28:02.000Z
from configparser import ConfigParser config = ConfigParser() config.add_section('system') config.add_section('linkedin') config.add_section('profiles_data') config.add_section('profiles_data_by_name') print("Welcome to the configuration process.") linkedin_username = "" while linkedin_username == "": print("Insert linkedin username.") print("> ", end="") linkedin_username = input() config.set('linkedin', 'username', linkedin_username) linkedin_password = "" while linkedin_password == "": print("Insert linkedin password.") print("> ", end="") linkedin_password = input() config.set('linkedin', 'password', linkedin_password) print("Insert the name of the .txt file that contains people profile urls.") print("Notice: It doesn't matter if it doesn't exist right now.") print("Leave blank for default option (profiles_data.txt)") print("> ", end="") input_file_name = input() input_file_name = input_file_name if not input_file_name == "" else "profiles_data.txt" config.set('profiles_data', 'input_file_name', input_file_name) with open(input_file_name, "w"): pass print("Insert the name of the .xlsx file that will contain the results of the scraping by profile url.") print("Leave blank for default option (results_profiles.xlsx)") print("> ", end="") output_file_name = input() output_file_name = output_file_name if not output_file_name == "" else "results_profiles.xlsx" config.set('profiles_data', 'output_file_name', output_file_name) print("Do you want to append to it the timestamp in order to prevent to overwrite past results?") print("Y for yes, N for no") print("Leave blank for default option (Y)") print("> ", end="") append_timestamp = input() append_timestamp = append_timestamp if not append_timestamp == "" else "Y" config.set('profiles_data', 'append_timestamp', append_timestamp) with open('config.ini', 'w') as f: config.write(f) print("") print("Configuration completed. 
You can now do scraping.") print("To scrape profile by url: execute do_scraping.py") print("To search profiles by name: execute search_profiles_by_name.py")
36.172414
104
0.743565
300
2,098
5.003333
0.28
0.069287
0.060626
0.035976
0.201865
0.164557
0
0
0
0
0
0
0.126787
2,098
57
105
36.807018
0.819323
0
0
0.106383
0
0
0.446139
0.043375
0
0
0
0
0
1
0
false
0.12766
0.021277
0
0.021277
0.425532
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
1
0
2
e4c03f1c9506695d9a330c6aed52570f29f12b3a
405
py
Python
main.py
YuhengZhi/demix
e7bd400b3901f51bfeb4012753a84c3c69aa78a6
[ "MIT" ]
null
null
null
main.py
YuhengZhi/demix
e7bd400b3901f51bfeb4012753a84c3c69aa78a6
[ "MIT" ]
null
null
null
main.py
YuhengZhi/demix
e7bd400b3901f51bfeb4012753a84c3c69aa78a6
[ "MIT" ]
null
null
null
import torchvision.models as models import torch.nn as nn import torch from torch.autograd import Variable models.alexnet() c = nn.ConvTranspose2d(1, 1, 5) print(c) input_feature = torch.Tensor([[2, 2], [2, 2]]) input_feature = Variable(input_feature) input_feature = input_feature[None, None, :, :] print(c(input_feature)) nn.Container import tensorflow as tf tf.Tensor(np.array())
21.315789
48
0.718519
60
405
4.75
0.416667
0.252632
0.077193
0.126316
0
0
0
0
0
0
0
0.023529
0.160494
405
18
49
22.5
0.814706
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.357143
0
0.357143
0.142857
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
e4c5999b2f7af243be2fe7deb3af2c5f836515df
2,429
py
Python
corc/cli/job.py
rasmunk/corc
2d2ba92ab791f50fa46e1ff2cdc0035925032671
[ "MIT" ]
2
2020-10-31T14:55:26.000Z
2022-02-07T19:53:33.000Z
corc/cli/job.py
rasmunk/corc
2d2ba92ab791f50fa46e1ff2cdc0035925032671
[ "MIT" ]
3
2020-08-27T14:10:16.000Z
2021-09-23T23:31:19.000Z
corc/cli/job.py
rasmunk/corc
2d2ba92ab791f50fa46e1ff2cdc0035925032671
[ "MIT" ]
1
2022-02-07T19:53:34.000Z
2022-02-07T19:53:34.000Z
from corc.defaults import CLUSTER, OCI, JOB, META, S3, STORAGE, PROVIDER from corc.cli.args import extract_arguments from corc.cli.providers.helpers import select_provider from corc.job import ( run as api_run, get_results as api_get_results, delete_results as api_delete_results, list_results as api_list_results, ) def run(args): provider_kwargs = vars(extract_arguments(args, [PROVIDER])) cluster_kwargs = vars(extract_arguments(args, [CLUSTER])) meta_kwargs = vars(extract_arguments(args, [META])) job_kwargs = vars(extract_arguments(args, [JOB])) storage_kwargs = vars(extract_arguments(args, [STORAGE])) s3_kwargs = vars(extract_arguments(args, [S3])) provider = select_provider(provider_kwargs, default_fallback=True, verbose=True) if not provider: return False provider_kwargs = vars(extract_arguments(args, [provider.upper()])) job_kwargs["meta"] = meta_kwargs action_kwargs = dict( cluster_kwargs=cluster_kwargs, job_kwargs=job_kwargs, storage_kwargs=storage_kwargs, staging_kwargs=s3_kwargs, ) api_run(provider, provider_kwargs, action_kwargs) def get_results(args): oci_args = vars(extract_arguments(args, [OCI])) job_args = vars(extract_arguments(args, [JOB])) staging_args = vars(extract_arguments(args, [STORAGE])) s3_args = vars(extract_arguments(args, [S3])) if oci_args: return api_get_results( job_kwargs=job_args, staging_kwargs=staging_args, storage_kwargs=s3_args, ) def delete_results(args): job_args = vars(extract_arguments(args, [JOB])) staging_args = vars(extract_arguments(args, [STORAGE])) s3_args = vars(extract_arguments(args, [S3])) return api_delete_results( job_kwargs=job_args, staging_kwargs=staging_args, storage_kwargs=s3_args ) def list_results(args): oci_args = vars(extract_arguments(args, [OCI])) job_args = vars(extract_arguments(args, [JOB])) staging_args = vars(extract_arguments(args, [STORAGE])) s3_args = vars(extract_arguments(args, [S3])) if oci_args: if "extra_kwargs" in s3_args: storage_extra_kwargs = s3_args.pop("extra_kwargs") return 
api_list_results( job_kwargs=job_args, staging_kwargs=staging_args, storage_kwargs=s3_args, storage_extra_kwargs=storage_extra_kwargs, )
33.273973
85
0.706464
314
2,429
5.133758
0.136943
0.188586
0.223325
0.26799
0.579404
0.472084
0.451613
0.394541
0.394541
0.394541
0
0.007661
0.193907
2,429
72
86
33.736111
0.815628
0
0
0.224138
0
0
0.011527
0
0
0
0
0
0
1
0.068966
false
0
0.068966
0
0.206897
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
e4c6357a57a9a7f1fa2506e88bc63125e70b73a2
6,203
py
Python
spyql/output_handler.py
alin23/spyql
41105b7c536ae21139d0d89cfb6b2a8b6deebf1e
[ "MIT" ]
432
2021-08-17T16:52:36.000Z
2022-03-30T15:33:26.000Z
spyql/output_handler.py
alin23/spyql
41105b7c536ae21139d0d89cfb6b2a8b6deebf1e
[ "MIT" ]
48
2021-07-27T17:18:05.000Z
2022-03-15T09:33:01.000Z
spyql/output_handler.py
alin23/spyql
41105b7c536ae21139d0d89cfb6b2a8b6deebf1e
[ "MIT" ]
16
2021-11-27T15:42:42.000Z
2022-02-16T11:36:37.000Z
from spyql.nulltype import Null class OutputHandler: """Mediates data processing with data writting""" @staticmethod def make_handler(prs): """ Chooses the right handler depending on the kind of query and eventual optimization opportunities """ if prs["group by"] and not prs["partials"]: return GroupByDelayedOutSortAtEnd( prs["order by"], prs["limit"], prs["offset"] ) if prs["order by"]: # TODO optimization: use special handler that only keeps the top n elements # in memory when LIMIT is defined if prs["distinct"]: return DistinctDelayedOutSortAtEnd( prs["order by"], prs["limit"], prs["offset"] ) return DelayedOutSortAtEnd(prs["order by"], prs["limit"], prs["offset"]) if prs["distinct"]: return LineInDistinctLineOut(prs["limit"], prs["offset"]) return LineInLineOut(prs["limit"], prs["offset"]) def __init__(self, limit, offset): self.limit = limit self.rows_written = 0 self.offset = offset if offset else 0 def set_writer(self, writer): self.writer = writer def handle_result(self, result, group_key, sort_keys): """ To be implemented by child classes to handle a new output row (aka result). All inputs should be tuples. 
""" return self.is_done() def is_done(self): # premature ending return self.limit is not None and self.rows_written >= self.limit def write(self, row): if self.offset > 0: self.offset = self.offset - 1 else: self.writer.writerow(row) self.rows_written = self.rows_written + 1 def finish(self): self.writer.flush() class LineInLineOut(OutputHandler): """Simple handler that immediately writes every processed row""" def handle_result(self, result, *_): self.write(result) return self.is_done() def finish(self): super().finish() class LineInDistinctLineOut(OutputHandler): """In-memory distinct handler that immediately writes every non-duplicated row""" def __init__(self, limit, offset): super().__init__(limit, offset) self.output_rows = set() def handle_result(self, result, *_): # uses a dict to store distinct results instead of storing all rows if result in self.output_rows: return False # duplicate self.output_rows.add(result) self.write(result) return self.is_done() def finish(self): super().finish() class DelayedOutSortAtEnd(OutputHandler): """ Only writes after collecting and sorting all data. Temporary implementation that reads every processed row into memory. """ def __init__(self, orderby, limit, offset): super().__init__(limit, offset) self.orderby = orderby self.output_rows = [] def handle_result(self, result, sort_keys, *_): self.output_rows.append({"data": result, "sort_keys": sort_keys}) # TODO use temporary files to write `output_rows` whenever it gets too large # TODO sort intermediate results before writing to a temporary file return False # no premature endings here def finish(self): # TODO read and merge previously sorted temporary files (look into heapq.merge) # 1. 
sorts everything if self.orderby: for i in reversed(range(len(self.orderby))): # taking advantage of list.sort being stable to sort elements from minor # to major criteria (not be the most efficient way but straightforward) self.output_rows.sort( key=lambda row: ( # handle of NULLs based on NULLS FIRST/LAST specification (row["sort_keys"][i] is Null) != self.orderby[i]["rev_nulls"], row["sort_keys"][i], ), reverse=self.orderby[i]["rev"], # handles ASC/DESC order ) # 2. writes sorted rows to output for row in self.output_rows: # it would be more efficient to slice `output_rows` based on limit/offset # however, this is more generic with less repeated logic and this is a # temporary implementation if self.is_done(): break self.write(row["data"]) super().finish() class GroupByDelayedOutSortAtEnd(DelayedOutSortAtEnd): """ Extends `DelayedOutSortAtEnd` to only store intermediate group by results instead of keeping all rows in memory """ def __init__(self, orderby, limit, offset): super().__init__(orderby, limit, offset) self.output_rows = dict() def handle_result(self, result, sort_keys, group_key): # uses a dict to store intermidiate group by results instead of storing all rows self.output_rows[group_key] = {"data": result, "sort_keys": sort_keys} return False # no premature endings here def finish(self): # converts output_rows dict to list so that it can be sorted and written self.output_rows = list(self.output_rows.values()) super().finish() class DistinctDelayedOutSortAtEnd(DelayedOutSortAtEnd): """ Alters `DelayedOutSortAtEnd` to only store distinct results instead of keeping all rows in memory """ def __init__(self, orderby, limit, offset): super().__init__(orderby, limit, offset) self.output_rows = dict() def handle_result(self, result, sort_keys, *_): # uses a dict to store distinct results instead of storing all rows if result not in self.output_rows: self.output_rows[result] = sort_keys return False # no premature endings here def finish(self): # converts 
output_rows dict to list so that it can be sorted and written self.output_rows = [ {"data": k, "sort_keys": v} for k, v in self.output_rows.items() ] super().finish()
35.244318
88
0.616798
744
6,203
5.017473
0.258065
0.053576
0.060005
0.030538
0.375837
0.303777
0.283954
0.254487
0.254487
0.213233
0
0.001597
0.293245
6,203
175
89
35.445714
0.849909
0.311946
0
0.393939
0
0
0.046715
0
0
0
0
0.011429
0
1
0.212121
false
0
0.010101
0.010101
0.414141
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
0
0
0
0
0
0
2
e4db61c44b0ae216f68c1760571a6b216cab86fa
2,042
py
Python
sleepproxy/dnsserve.py
hkjinlee/SleepProxyServer
b2914fcf45ed7ae29882e97dfb574fd1bd20d8b2
[ "BSD-2-Clause" ]
5
2015-01-07T08:42:25.000Z
2019-06-10T09:14:32.000Z
sleepproxy/dnsserve.py
hkjinlee/SleepProxyServer
b2914fcf45ed7ae29882e97dfb574fd1bd20d8b2
[ "BSD-2-Clause" ]
null
null
null
sleepproxy/dnsserve.py
hkjinlee/SleepProxyServer
b2914fcf45ed7ae29882e97dfb574fd1bd20d8b2
[ "BSD-2-Clause" ]
5
2015-05-10T07:08:39.000Z
2022-02-07T06:21:07.000Z
import struct import dns.message import dns.reversename import IPy import netifaces from sleepproxy.manager import manage_host def handle(server, raddress, message): try: message = dns.message.from_wire(message) except: print "Error decoding DNS message" return if message.edns < 0: print "Received non-EDNS message, ignoring" return if not (message.opcode() == 5 and message.authority): print "Received non-UPDATE message, ignoring" return info = {'records': [], 'addresses': []} # Try to guess the interface this came in on for iface in netifaces.interfaces(): ifaddresses = netifaces.ifaddresses(iface) for af, addresses in ifaddresses.items(): if af != 2: # AF_INET continue for address in addresses: net = IPy.IP(address['addr']).make_net(address['netmask']) if IPy.IP(raddress[0]) in net: info['mymac'] = ifaddresses[17][0]['addr'] info['myif'] = iface for rrset in message.authority: info['records'].append(rrset) _add_addresses(info, rrset) for option in message.options: if option.otype == 2: info['ttl'] = struct.unpack("!L", option.data) if option.otype == 4: info['othermac'] = option.data.encode('hex_codec')[4:] # TODO: endsflags seems to indicate some other TTL # TODO: Better composability manage_host(info) _answer(server.socket, raddress, message) def _add_addresses(info, rrset): # Not sure if this is the correct way to detect addresses. if rrset.rdtype != dns.rdatatype.PTR or rrset.rdclass != dns.rdataclass.IN: return # Meh. if not rrset.name.to_text().endswith('.arpa.'): return info['addresses'].append(dns.reversename.to_address(rrset.name)) def _answer(sock, address, query): response = dns.message.make_response(query) sock.sendto(response.to_wire(), address)
28.760563
79
0.622919
250
2,042
5.024
0.436
0.031847
0.025478
0.033439
0
0
0
0
0
0
0
0.006662
0.264936
2,042
70
80
29.171429
0.830113
0.092067
0
0.104167
0
0
0.098592
0
0
0
0
0.014286
0
0
null
null
0
0.125
null
null
0.0625
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
1
0
0
0
0
0
0
0
0
2
e4ebb14534a344d3c708feeec73a09822a512fa1
755
py
Python
core/src/zeit/cms/testcontenttype/testcontenttype.py
rickdg/vivi
16134ac954bf8425646d4ad47bdd1f372e089355
[ "BSD-3-Clause" ]
5
2019-05-16T09:51:29.000Z
2021-05-31T09:30:03.000Z
core/src/zeit/cms/testcontenttype/testcontenttype.py
rickdg/vivi
16134ac954bf8425646d4ad47bdd1f372e089355
[ "BSD-3-Clause" ]
107
2019-05-24T12:19:02.000Z
2022-03-23T15:05:56.000Z
core/src/zeit/cms/testcontenttype/testcontenttype.py
rickdg/vivi
16134ac954bf8425646d4ad47bdd1f372e089355
[ "BSD-3-Clause" ]
3
2020-08-14T11:01:17.000Z
2022-01-08T17:32:19.000Z
import zeit.cms.content.metadata import zeit.cms.interfaces import zeit.cms.testcontenttype.interfaces import zeit.cms.type import zope.interface @zope.interface.implementer( zeit.cms.testcontenttype.interfaces.IExampleContentType, zeit.cms.interfaces.IEditorialContent) class ExampleContentType(zeit.cms.content.metadata.CommonMetadata): """A type for testing.""" default_template = ( '<testtype xmlns:py="http://codespeak.net/lxml/objectify/pytype">' '<head/><body/></testtype>') class ExampleContentTypeType(zeit.cms.type.XMLContentTypeDeclaration): factory = ExampleContentType interface = zeit.cms.testcontenttype.interfaces.IExampleContentType type = 'testcontenttype' register_as_type = False
29.038462
74
0.765563
78
755
7.371795
0.487179
0.109565
0.090435
0.166957
0.177391
0
0
0
0
0
0
0
0.124503
755
25
75
30.2
0.869894
0.025166
0
0
0
0
0.142661
0.034294
0
0
0
0
0
1
0
false
0
0.294118
0
0.705882
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
900dc291bcb088e38141f27515fc928e116380b6
568
py
Python
opgc/apps/githubs/migrations/0004_auto_20210223_2149.py
DirtyBoyz/opgc_backend
3fc7160d9d637378a358d4fc161fae495a18f0bd
[ "MIT" ]
9
2020-08-19T02:49:03.000Z
2022-03-04T10:50:18.000Z
opgc/apps/githubs/migrations/0004_auto_20210223_2149.py
DirtyBoyz/opgc_backend
3fc7160d9d637378a358d4fc161fae495a18f0bd
[ "MIT" ]
7
2021-07-20T15:13:07.000Z
2022-03-08T13:14:43.000Z
opgc/apps/githubs/migrations/0004_auto_20210223_2149.py
DirtyBoyz/opgc_backend
3fc7160d9d637378a358d4fc161fae495a18f0bd
[ "MIT" ]
2
2021-08-13T08:05:34.000Z
2022-02-28T05:51:06.000Z
# Generated by Django 2.2.17 on 2021-02-23 21:49 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('githubs', '0003_auto_20210214_0120'), ] operations = [ migrations.AddField( model_name='githubuser', name='total_stargazers_count', field=models.IntegerField(default=0), ), migrations.AddField( model_name='repository', name='stargazers_count', field=models.IntegerField(default=0), ), ]
23.666667
49
0.59507
56
568
5.892857
0.660714
0.109091
0.139394
0.163636
0.278788
0.278788
0.278788
0
0
0
0
0.085
0.295775
568
23
50
24.695652
0.74
0.080986
0
0.352941
1
0
0.169231
0.086538
0
0
0
0
0
1
0
false
0
0.058824
0
0.235294
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
901be15244c5f3098180fe44dafb174501ebf728
9,875
py
Python
src/speechkit/_auth.py
drygdryg/yandex-speechkit-lib-python
b45bbb6550d5bb77289ff0462e794cbc8b0669a0
[ "MIT" ]
2
2021-12-13T20:29:00.000Z
2022-01-28T13:02:54.000Z
src/speechkit/_auth.py
drygdryg/yandex-speechkit-lib-python
b45bbb6550d5bb77289ff0462e794cbc8b0669a0
[ "MIT" ]
1
2021-07-26T00:36:18.000Z
2021-07-26T13:52:59.000Z
src/speechkit/_auth.py
drygdryg/yandex-speechkit-lib-python
b45bbb6550d5bb77289ff0462e794cbc8b0669a0
[ "MIT" ]
2
2022-02-08T12:39:26.000Z
2022-03-12T19:08:18.000Z
import functools import time import jwt import requests from speechkit.exceptions import RequestError def generate_jwt(service_account_id, key_id, private_key, exp_time=360): """ Generating JWT token for authorisation :param string service_account_id: The ID of the service account whose key the JWT is signed with. :param string key_id: The ID of the Key resource belonging to the service account. :param bytes private_key: Private key given from Yandex Cloud console in bytes :param integer exp_time: Optional. The token expiration time delta in seconds. The expiration time must not exceed the issue time by more than one hour, meaning exp_time ≤ 3600. Default 360 :return: JWT token :rtype: string """ if not isinstance(service_account_id, str) or not isinstance(key_id, str): raise ValueError("service_account_id, key_id, must be strings.") if 0 in (len(service_account_id), len(key_id)): raise ValueError("service_account_id, key_id, can't be empty.") if not isinstance(private_key, bytes): raise ValueError("private_key must be bytes string, but got {}".format(type(private_key))) if not isinstance(exp_time, int): raise ValueError("exp_time must be int, but got {}".format(type(exp_time))) if exp_time > 3600: raise ValueError("exp_time ≤ 3600, but got {}".format(exp_time)) now = int(time.time()) payload = { 'aud': 'https://iam.api.cloud.yandex.net/iam/v1/tokens', 'iss': service_account_id, 'iat': now, 'exp': now + exp_time } return jwt.encode( payload, private_key, algorithm='PS256', headers={'kid': key_id} ) def get_iam_token(yandex_passport_oauth_token=None, jwt_token=None): """ Creates an IAM token for the specified identity. 
`Getting IAM for Yandex account <https://cloud.yandex.com/en/docs/iam/operations/iam-token/create>`_ :param string yandex_passport_oauth_token: OAuth token from Yandex OAuth :param string jwt_token: Json Web Token, can be generated by :py:meth:`speechkit.generate_jwt` :return: IAM token :rtype: string """ if not type(yandex_passport_oauth_token) in (str, type(None)): raise TypeError("__init__() yandex_passport_oauth_token: got {} but expected \ type is str or None".format(type(yandex_passport_oauth_token).__name__)) if not type(jwt_token) in (str, type(None)): raise TypeError("__init__() jwt_token: got {} but expected \ type is str or None".format(type(jwt_token).__name__)) if (not yandex_passport_oauth_token and not jwt_token) or (yandex_passport_oauth_token and jwt_token): raise ValueError("Includes only one of the fields `yandex_passport_oauth_token`, `jwt_token`") if yandex_passport_oauth_token: data = {'yandexPassportOauthToken': str(yandex_passport_oauth_token)} else: data = {'jwt': str(jwt_token)} url = "https://iam.api.cloud.yandex.net/iam/v1/tokens" answer = requests.post(url, json=data) if answer.ok: return answer.json().get('iamToken') else: raise RequestError(answer.json()) def get_api_key(yandex_passport_oauth_token=None, service_account_id=None, description='Default Api-Key created by `speechkit` python SDK'): """ Creates an API key for the specified service account. :param string yandex_passport_oauth_token: OAuth token from Yandex OAuth :param string service_account_id: The ID of the service account whose key the Api-Key is signed with. :param string description: Description for api-key. Optional. 
:return: Api-Key :rtype: string """ if not yandex_passport_oauth_token or not service_account_id: raise ValueError("`yandex_passport_oauth_token` and `service_account_id` required.") url = 'https://iam.api.cloud.yandex.net/iam/v1/apiKeys' headers = { 'Authorization': 'Bearer {}'.format(get_iam_token(yandex_passport_oauth_token=yandex_passport_oauth_token)) } data = {'serviceAccountId': service_account_id, 'description': description} answer = requests.post(url, headers=headers, json=data) if answer.ok: return answer.json().get('secret') else: raise RequestError(answer.json()) class Session: """Class provides yandex API authentication.""" IAM_TOKEN = 'iam_token' """Iam_token if iam auth, value: 'iam_token'""" API_KEY = 'api_key' """Api key if api-key auth, value: 'api_key'""" def __init__(self, auth_type, credential, folder_id): """ Stores credentials for given auth method :param string auth_type: Type of auth may be :py:meth:`Session.IAM_TOKEN` or :py:meth:`Session.API_KEY` :param string | None folder_id: Id of the folder that you have access to. Don't specify this field if you make a request on behalf of a service account. :param string credential: Auth key iam or api key """ if auth_type not in (self.IAM_TOKEN, self.API_KEY): raise ValueError( "auth_type must be `Session.IAM_TOKEN` or `Session.API_KEY`, but given {}".format(auth_type) ) self._auth_method = auth_type if not isinstance(credential, str): raise ValueError("_credential must be string, but got {}".format(type(credential))) self._credential = credential self.folder_id = folder_id @classmethod def from_api_key(cls, api_key, folder_id=None): """ Creates session from api key :param string api_key: Yandex Cloud Api-Key :param string | None folder_id: Id of the folder that you have access to. Don't specify this field if you make a request on behalf of a service account. 
:return: Session instance :rtype: Session """ if not isinstance(api_key, str): raise ValueError("Api-Key must be string, but got {}".format(type(api_key))) if len(api_key) == 0: raise ValueError("Api-Key can not be empty.") if folder_id: if not isinstance(folder_id, str): raise ValueError("folder_id must be string, but got {}".format(type(folder_id))) if len(folder_id) == 0: raise ValueError("folder_id must not be empty.") return cls(cls.API_KEY, api_key, folder_id=folder_id) @classmethod def from_yandex_passport_oauth_token(cls, yandex_passport_oauth_token, folder_id): """ Creates Session from oauth token Yandex account :param string yandex_passport_oauth_token: OAuth token from Yandex.OAuth :param string folder_id: Id of the folder that you have access to. Don't specify this field if you make a request on behalf of a service account. :return: Session instance :rtype: Session """ if not isinstance(yandex_passport_oauth_token, str): raise ValueError( "yandex_passport_oauth_token must be string, but got {}".format(type(yandex_passport_oauth_token)) ) if len(yandex_passport_oauth_token) == 0: raise ValueError("yandex_passport_oauth_token can not be empty.") if not isinstance(folder_id, str): raise ValueError("folder_id must be string, but got {}".format(type(folder_id))) if len(folder_id) == 0: raise ValueError("folder_id must not be empty.") iam_token = get_iam_token(yandex_passport_oauth_token=yandex_passport_oauth_token) return cls(cls.IAM_TOKEN, iam_token, folder_id=folder_id) @classmethod def from_jwt(cls, jwt_token, folder_id=None): """ Creates Session from JWT token :param string jwt_token: JWT :param string | None folder_id: Id of the folder that you have access to. Don't specify this field if you make a request on behalf of a service account. 
:return: Session instance :rtype: Session """ if not isinstance(jwt_token, str): raise ValueError("jwt_token must be string, but got {}".format(type(jwt_token))) if len(jwt_token) == 0: raise ValueError("jwt_token can not be empty.") if folder_id: if not isinstance(folder_id, str): raise ValueError("folder_id must be string, but got {}".format(type(folder_id))) if len(folder_id) == 0: raise ValueError("folder_id must not be empty.") iam_token = get_iam_token(jwt_token=jwt_token) return cls(cls.IAM_TOKEN, iam_token, folder_id=folder_id) @functools.cached_property def header(self): """ Authentication header. :return: Dict in format `{'Authorization': 'Bearer or Api-Key {iam or api_key}'}` :rtype: dict """ if self._auth_method == self.IAM_TOKEN: return {'Authorization': 'Bearer {iam}'.format(iam=self._credential)} if self._auth_method == self.API_KEY: return {'Authorization': 'Api-Key {api_key}'.format(api_key=self._credential)} @functools.cached_property def streaming_recognition_header(self): """ Authentication header for streaming recognition :return: Tuple in format `('authorization', 'Bearer or Api-Key {iam or api_key}')` :rtype: tuple """ if self._auth_method == self.IAM_TOKEN: return tuple(('authorization', 'Bearer {iam}'.format(iam=self._credential),)) if self._auth_method == self.API_KEY: return tuple(('authorization', 'Api-Key {api_key}'.format(api_key=self._credential),)) @functools.cached_property def auth_method(self): return self._auth_method
39.818548
115
0.661266
1,342
9,875
4.659463
0.129657
0.037422
0.079002
0.099792
0.549016
0.491284
0.443947
0.404126
0.371182
0.342076
0
0.004146
0.242835
9,875
247
116
39.979757
0.831884
0.274532
0
0.271318
1
0
0.192337
0.020667
0
0
0
0
0
1
0.077519
false
0.139535
0.03876
0.007752
0.224806
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
902461739bfb362a727b7d895bd8a110f37690c8
208
py
Python
my_script.py
discdiver/name-equals-main
13192311d4a1eb024714bc6047f508003b84f6a9
[ "BSD-3-Clause" ]
null
null
null
my_script.py
discdiver/name-equals-main
13192311d4a1eb024714bc6047f508003b84f6a9
[ "BSD-3-Clause" ]
null
null
null
my_script.py
discdiver/name-equals-main
13192311d4a1eb024714bc6047f508003b84f6a9
[ "BSD-3-Clause" ]
null
null
null
# my_script print(f"My __name__ is: {__name__}") def i_am_main(): print("I'm main!") def i_am_imported(): print("I'm iported!") if __name__ == "__main__": i_am_main() else: i_am_imported()
14.857143
36
0.634615
34
208
3.147059
0.441176
0.11215
0.11215
0
0
0
0
0
0
0
0
0
0.192308
208
13
37
16
0.636905
0.043269
0
0
0
0
0.279188
0
0
0
0
0
0
1
0.222222
true
0
0.222222
0
0.444444
0.333333
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
0
0
0
2
9028258e9c880c972e0a8152451b2f3c2180aae9
1,210
py
Python
apps/entry/migrations/0007_update_verbose_names.py
rmdes/tanzawa
d53baa10bd6c217cd18628437a88a43e3bd02b70
[ "Apache-2.0" ]
25
2021-06-13T03:38:44.000Z
2022-03-15T15:53:31.000Z
apps/entry/migrations/0007_update_verbose_names.py
rmdes/tanzawa
d53baa10bd6c217cd18628437a88a43e3bd02b70
[ "Apache-2.0" ]
59
2021-06-12T23:35:06.000Z
2022-03-24T21:40:24.000Z
apps/entry/migrations/0007_update_verbose_names.py
rmdes/tanzawa
d53baa10bd6c217cd18628437a88a43e3bd02b70
[ "Apache-2.0" ]
null
null
null
# Generated by Django 3.2.2 on 2021-06-03 10:12 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ("entry", "0006_tentry_t_post_1x1"), ] operations = [ migrations.AlterModelOptions( name="tbookmark", options={"verbose_name": "Bookmark", "verbose_name_plural": "Bookmarks"}, ), migrations.AlterModelOptions( name="tcheckin", options={"verbose_name": "Checkin", "verbose_name_plural": "Checkins"}, ), migrations.AlterModelOptions( name="tentry", options={"verbose_name": "Entry", "verbose_name_plural": "Entries"}, ), migrations.AlterModelOptions( name="tlocation", options={"verbose_name": "Location", "verbose_name_plural": "Locations"}, ), migrations.AlterModelOptions( name="treply", options={"verbose_name": "Reply", "verbose_name_plural": "Replies"}, ), migrations.AlterModelOptions( name="tsyndication", options={"verbose_name": "Syndication URL", "verbose_name_plural": "Syndication URLs"}, ), ]
31.842105
99
0.586777
103
1,210
6.679612
0.456311
0.19186
0.270349
0
0
0
0
0
0
0
0
0.024055
0.278512
1,210
37
100
32.702703
0.764032
0.03719
0
0.387097
1
0
0.315563
0.018917
0
0
0
0
0
1
0
false
0
0.032258
0
0.129032
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
90345498d1466b26c34581ed2ba6f1424a80c06b
1,281
py
Python
basic-example/test.py
tr4r3x/remora
f66e579afe97280419cf9bb7f90f200a5d4ba871
[ "MIT" ]
206
2017-04-21T08:34:53.000Z
2022-02-23T22:29:07.000Z
basic-example/test.py
tr4r3x/remora
f66e579afe97280419cf9bb7f90f200a5d4ba871
[ "MIT" ]
83
2017-05-22T12:31:15.000Z
2021-12-17T00:27:30.000Z
basic-example/test.py
tr4r3x/remora
f66e579afe97280419cf9bb7f90f200a5d4ba871
[ "MIT" ]
43
2017-05-23T16:41:57.000Z
2021-08-24T09:05:36.000Z
#!/usr/bin/python import requests def check(): data = {} consumers = requests.get('http://localhost:9000/consumers').json() for consumer_group in consumers: consumer_infos = requests.get( 'http://localhost:9000/consumers/{consumer_group}'.format( consumer_group=consumer_group)).json() for partition in consumer_infos['partition_assignment']: data[ '{consumer_group}-{topic}-{partition}-lag'.format( consumer_group=consumer_group, topic=partition['topic'], partition=partition['partition'])] = partition['lag'] data[ '{consumer_group}-{topic}-{partition}-log_end_offset'.format( consumer_group=consumer_group, topic=partition['topic'], partition=partition['partition'])] = partition['log_end_offset'] data[ '{consumer_group}-{topic}-{partition}-offset'.format( consumer_group=consumer_group, topic=partition['topic'], partition=partition['partition'])] = partition['offset'] print(data) return data if __name__ == "__main__": check()
31.243902
84
0.5605
113
1,281
6.106195
0.283186
0.244928
0.156522
0.234783
0.684058
0.502899
0.395652
0.395652
0.395652
0.395652
0
0.009081
0.312256
1,281
40
85
32.025
0.77412
0.01249
0
0.321429
0
0
0.242089
0.106013
0
0
0
0
0
1
0.035714
false
0
0.035714
0
0.107143
0.035714
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
9046569ea6c4f25ea30cf85aa9680823a04b3285
371
py
Python
download-deveres/para-execicios-curso-em-video/exe004.py
Hugo-Oliveira-RDO11/meus-deveres
b5e41015e2cb95946262678e82197e5f47d56271
[ "MIT" ]
null
null
null
download-deveres/para-execicios-curso-em-video/exe004.py
Hugo-Oliveira-RDO11/meus-deveres
b5e41015e2cb95946262678e82197e5f47d56271
[ "MIT" ]
null
null
null
download-deveres/para-execicios-curso-em-video/exe004.py
Hugo-Oliveira-RDO11/meus-deveres
b5e41015e2cb95946262678e82197e5f47d56271
[ "MIT" ]
null
null
null
n = input('digite algum : ') print('o tipo primitivo desse valor e :',type(n)) print('isso e um numero ?',n.isnumeric()) print('isso e uma letra ?',n.isalpha()) print('isso e espaço ?',n.isspace()) print('isso e numero e letra ?',n.isalnum()) print('estar em maiusculas ?',n.isupper()) print('estar em minusculas ?',n.islower()) print('estar capitalizada ?',n.istitle())
37.1
49
0.673854
58
371
4.310345
0.517241
0.144
0.16
0
0
0
0
0
0
0
0
0
0.118598
371
9
50
41.222222
0.764526
0
0
0
0
0
0.493261
0
0
0
0
0
0
1
0
false
0
0
0
0
0.888889
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
2
90482bd1c521592934a45fd58cd0cb43510d2a81
1,046
py
Python
java.py
hhu-stups/python_jvm
57b9e6cbdfae23c2aeac1383b04b2ab9424cea16
[ "MIT" ]
4
2016-10-08T20:23:22.000Z
2018-07-25T10:59:08.000Z
java.py
hhu-stups/python_jvm
57b9e6cbdfae23c2aeac1383b04b2ab9424cea16
[ "MIT" ]
null
null
null
java.py
hhu-stups/python_jvm
57b9e6cbdfae23c2aeac1383b04b2ab9424cea16
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # https://sourceforge.net/projects/javavm/ # http://javavm.svn.sourceforge.net/ import sys, py, os from interp import ClassLoader, Stack from classloader import encode_name, descriptor from helper import make_String from objectmodel import Arrayref # FIXME: Uses the BootstrapClassloader # TODO: Use the AppClassLoader current_dir = os.getcwd() if len(sys.argv)>1: classname = sys.argv[1] loader = ClassLoader([str(current_dir)]) jcls = loader.getclass(classname) main_name = encode_name(u'main', [u'array:reference:java/lang/String', None]) str_list = [] for arg in sys.argv[2:]: str_list.append(make_String(arg, self.loader))#XXX stack = Stack() stack.push(Arrayref(str_list, "", loader.getclass("[Ljava.lang.String;"))) method = jcls.methods[unicode(main_name)] const = jcls.cls.constant_pool descr = descriptor(const[method.descriptor_index]) res = loader.invoke_method(jcls.cls, method, descr, stack) else: print "use python java.py <ClassName,[Args...]>"
37.357143
81
0.712237
143
1,046
5.111888
0.538462
0.028728
0.021888
0
0
0
0
0
0
0
0
0.004499
0.150096
1,046
28
82
37.357143
0.817773
0.1587
0
0
0
0
0.108696
0.060641
0
0
0
0.035714
0
0
null
null
0
0.227273
null
null
0.045455
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
1
0
0
0
0
0
0
0
0
2
5f4af67228bd47b10fd3b8c5f4a9aeb30367f543
862
py
Python
scripts/Chengxi_CSP.py
CooperComputationalCaucus/pyXtal
35a9fe89d69db4ae734d71c1311dc8629cd673b9
[ "MIT" ]
null
null
null
scripts/Chengxi_CSP.py
CooperComputationalCaucus/pyXtal
35a9fe89d69db4ae734d71c1311dc8629cd673b9
[ "MIT" ]
null
null
null
scripts/Chengxi_CSP.py
CooperComputationalCaucus/pyXtal
35a9fe89d69db4ae734d71c1311dc8629cd673b9
[ "MIT" ]
null
null
null
''' Script to pull in set of cif files and make a single dataframe . @author: pmm ''' import numpy as np import os import sys sys.path.append('../') import pyXtal as pxt from pyXtal.csp_utils.dataset_io import parse_filenames import pandas as pd dirs = ['/Users/pmm/Documents/xtal_learning/triptycene/cifs/T2', '/Users/pmm/Documents/xtal_learning/triptycene/cifs/ring3', '/Users/pmm/Documents/xtal_learning/triptycene/cifs/ring32', '/Users/pmm/Documents/xtal_learning/triptycene/cifs/ring34', '/Users/pmm/Documents/xtal_learning/triptycene/cifs/ring39' ] frames=[] for dir in dirs: _df = parse_filenames(dir, keys=['_','Energy','Density']) _df.set_index("Name",inplace=True) frames.append(_df) df = pd.concat(frames) df.to_pickle('/Users/pmm/Documents/xtal_learning/triptycene/cifs/data_triptycene_set')
31.925926
86
0.725058
122
862
4.97541
0.491803
0.079077
0.16804
0.207578
0.425041
0.425041
0.425041
0
0
0
0
0.010782
0.139211
862
27
86
31.925926
0.807278
0.090487
0
0
0
0
0.477477
0.45045
0
0
0
0
0
1
0
false
0
0.3
0
0.3
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
5f50a9645ac7f07839d20424dbed3960646a08f5
464
py
Python
Tests/FileProcessorCopyToAbsolutePathTest.py
CorradoTorino/FilesRecovery
8c7a3b37eece2ef1f5874e2c96f0d004186891b5
[ "MIT" ]
null
null
null
Tests/FileProcessorCopyToAbsolutePathTest.py
CorradoTorino/FilesRecovery
8c7a3b37eece2ef1f5874e2c96f0d004186891b5
[ "MIT" ]
null
null
null
Tests/FileProcessorCopyToAbsolutePathTest.py
CorradoTorino/FilesRecovery
8c7a3b37eece2ef1f5874e2c96f0d004186891b5
[ "MIT" ]
null
null
null
import unittest import os import sys import shutil from FileProcessor import FileProcessor class FileProcessorCopyToAbsolutePathTest(unittest.TestCase): def test_FileProcessorCopyFilesTest_Run_EmptyFolder(self): inputFolder = r"e:" outputFolder = r"d:\Temp\FileProcessorTestOuput" processor = FileProcessor(inputFolder, outputFolder) processor.Run() if __name__ == '__main__': unittest.main()
24.421053
62
0.709052
41
464
7.756098
0.658537
0
0
0
0
0
0
0
0
0
0
0
0.221983
464
19
63
24.421053
0.880886
0
0
0
0
0
0.086022
0.064516
0
0
0
0
0
1
0.076923
false
0
0.384615
0
0.538462
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
5f5f4043414f3e4a5f137f800ef9a1cdb7e68d1f
314
py
Python
tests/test_basic_functionality.py
aanania/PyXTBClient
f0d70e03ea0a57e6f57fdd8d2ed1e596e732a1a3
[ "MIT" ]
11
2018-09-21T21:30:42.000Z
2021-03-11T08:46:35.000Z
tests/test_basic_functionality.py
aanania/PyXTBClient
f0d70e03ea0a57e6f57fdd8d2ed1e596e732a1a3
[ "MIT" ]
1
2020-04-10T10:47:26.000Z
2020-04-10T10:47:26.000Z
tests/test_basic_functionality.py
aanania/PyXTBClient
f0d70e03ea0a57e6f57fdd8d2ed1e596e732a1a3
[ "MIT" ]
3
2019-03-07T14:07:25.000Z
2020-04-10T15:28:09.000Z
def test_get_symbols(xtb_client): symbols = list(xtb_client.get_all_symbols()) assert len(symbols) > 0 def test_get_balance(xtb_client): balance = xtb_client.get_balance() assert balance.get('balance') is not None def test_ping(xtb_client): response = xtb_client.ping() assert response
22.428571
48
0.729299
46
314
4.673913
0.369565
0.251163
0.093023
0
0
0
0
0
0
0
0
0.003846
0.171975
314
13
49
24.153846
0.823077
0
0
0
0
0
0.022293
0
0
0
0
0
0.333333
1
0.333333
false
0
0
0
0.333333
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
2
5f7341877ed1d66fdd9eb55a52bb232c68d6d194
327
py
Python
helperBot/Agregator.py
dariakhaetskaya/HelperBot
8b75a9a5b3566a4737320430e36dfd72af559964
[ "MIT" ]
2
2021-11-07T12:04:07.000Z
2021-12-27T15:42:31.000Z
helperBot/Agregator.py
dariakhaetskaya/HelperBot
8b75a9a5b3566a4737320430e36dfd72af559964
[ "MIT" ]
null
null
null
helperBot/Agregator.py
dariakhaetskaya/HelperBot
8b75a9a5b3566a4737320430e36dfd72af559964
[ "MIT" ]
null
null
null
class AgregationController:
    """Skeleton of an aggregation controller; every method is an empty stub."""

    def __init__(self):
        """Initialize the controller (no state is set up yet)."""

    def subscribe(self, url):
        """Subscribe to updates from a source (stub — no body yet).

        :param url: URL of the source to watch
        :return: boolean
        """

    def notify(self, message, id):
        """Deliver a message to a list of recipients (stub — no body yet).

        :param message: Message text to send
        :param id: List<TgUID> of recipient ids
        :return: void
        """
16.35
34
0.458716
28
327
5.214286
0.571429
0
0
0
0
0
0
0
0
0
0
0
0.409786
327
20
35
16.35
0.756477
0.284404
0
0
0
0
0
0
0
0
0
0
0
1
0.75
false
0
0
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
2
5f7fb5a89cca7375241b7977a337f1f832d5b6dd
77
py
Python
circuitbreaker/states.py
postmates/circuitbreaker
91e5f2547740b193efbcd6a850380bf72e9d422a
[ "BSD-3-Clause" ]
null
null
null
circuitbreaker/states.py
postmates/circuitbreaker
91e5f2547740b193efbcd6a850380bf72e9d422a
[ "BSD-3-Clause" ]
1
2019-06-17T20:43:58.000Z
2019-06-17T20:43:58.000Z
circuitbreaker/states.py
postmates/circuitbreaker
91e5f2547740b193efbcd6a850380bf72e9d422a
[ "BSD-3-Clause" ]
null
null
null
# Circuit-breaker state identifiers, kept as bytes values
# (presumably so they can be stored/compared in a byte-oriented backend — verify).
STATE_CLOSED = b'closed'        # normal operation
STATE_OPEN = b'open'            # tripped: calls are short-circuited
STATE_HALF_OPEN = b'half-open'  # probing whether the dependency recovered
19.25
30
0.753247
14
77
3.857143
0.357143
0.185185
0
0
0
0
0
0
0
0
0
0
0.116883
77
3
31
25.666667
0.794118
0
0
0
0
0
0.246753
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
5f8c6a062d460268c543c1d201d3fd2198c40668
2,496
py
Python
book_manage/models.py
r-e-d-ant/Library-books-management
37c7970737942956d14e8b0e2878981f1af4953e
[ "MIT" ]
1
2021-03-14T19:22:33.000Z
2021-03-14T19:22:33.000Z
book_manage/models.py
r-e-d-ant/library-books-management
37c7970737942956d14e8b0e2878981f1af4953e
[ "MIT" ]
1
2021-12-16T11:32:36.000Z
2021-12-17T07:51:31.000Z
book_manage/models.py
r-e-d-ant/library-books-management
37c7970737942956d14e8b0e2878981f1af4953e
[ "MIT" ]
1
2021-08-12T15:22:11.000Z
2021-08-12T15:22:11.000Z
from datetime import datetime
from book_manage import db, login_manager
from flask_login import UserMixin


@login_manager.user_loader
def load_user(user_id):
    """Flask-Login hook: load the Admin row for a stored session user id."""
    return Admin.query.get(int(user_id))


# Admins are the users who upload books.
class Admin(db.Model, UserMixin):
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(100), nullable=False)
    email = db.Column(db.String(100), nullable=False)
    # NOTE(review): no hashing visible here — confirm callers hash passwords
    # before they are stored.
    password = db.Column(db.String(100), nullable=False)
    # One-to-many relationship with the Uploaded table ('auth' back-reference).
    posts = db.relationship('Uploaded', backref='auth', lazy=True)

    def __repr__(self):
        return f"Admin('{self.username}', '{self.email}')"


# Table where uploaded books are saved.
class Uploaded(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(100), nullable=False)
    author = db.Column(db.String(10))
    description = db.Column(db.Text)
    # db.ForeignKey('admin.id') ties each upload back to the Admin who made it.
    user_id = db.Column(db.Integer, db.ForeignKey('admin.id'), nullable=False)

    def __repr__(self):
        return f"Uploaded('{self.title}', '{self.author}', '{self.description}')"


# Table where borrowed-book records are saved.
class Borrowed(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    customer = db.Column(db.String(16), nullable=False)
    title = db.Column(db.String(100), nullable=False)
    author = db.Column(db.String(10))
    # Borrow date defaults to the (naive UTC) time of insertion.
    borrow_date = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)
    return_date = db.Column(db.DateTime, nullable=False)

    def __repr__(self):
        return f"Borrowed('{self.customer}', '{self.title}', '{self.author}', '{self.borrow_date}', '{self.return_date}')"


# Table recording returned books and the customer who borrowed them.
class Returned(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    customer_name = db.Column(db.String(16), nullable=False)
    customer_id = db.Column(db.Integer, nullable=False)
    book_title = db.Column(db.String(100), nullable=False)
    book_author = db.Column(db.String(100), nullable=False)
    borrowed_date = db.Column(db.DateTime, nullable=False)
    returned_date = db.Column(db.DateTime, nullable=False)

    def __repr__(self):
        return f"Returned('{self.customer_name}', '{self.book_title}', '{self.borrowed_date}', '{self.returned_date}')"
37.253731
122
0.695112
360
2,496
4.7
0.244444
0.104019
0.130024
0.104019
0.512411
0.428487
0.428487
0.256501
0.21513
0.21513
0
0.013902
0.164263
2,496
66
123
37.818182
0.79722
0.144231
0
0.292683
0
0.04878
0.15428
0.101599
0
0
0
0
0
1
0.121951
false
0.02439
0.073171
0.121951
0.97561
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
2
5f8ee46bb80fdf5e38fad6ac803940bc24b4636d
859
py
Python
asylum/zfs.py
cieplak/asylum
12f95d6b42dc441f41aa7e7cc9791675b01d3682
[ "MIT" ]
null
null
null
asylum/zfs.py
cieplak/asylum
12f95d6b42dc441f41aa7e7cc9791675b01d3682
[ "MIT" ]
null
null
null
asylum/zfs.py
cieplak/asylum
12f95d6b42dc441f41aa7e7cc9791675b01d3682
[ "MIT" ]
null
null
null
from asylum.console import Console


class Zfs(Console):
    """Builds ZFS/zpool shell command strings and hands them to Console.run."""

    @classmethod
    def create_pool(cls, device, name):
        """Create a zpool called `name` backed by `device`."""
        return cls.run('zpool create {} {}'.format(name, device))

    @classmethod
    def create_directory(cls, pool_path, filesystem_path):
        """Create a filesystem at `pool_path` mounted at `filesystem_path`."""
        return cls.run('zfs create -o mountpoint={} {}'.format(filesystem_path, pool_path))

    @classmethod
    def create(cls, pool_path):
        """Create a filesystem at `pool_path` with the default mountpoint."""
        return cls.run('zfs create {}'.format(pool_path))

    @classmethod
    def snapshot(cls, pool_path, version):
        """Snapshot `pool_path` as `pool_path@version`."""
        return cls.run('zfs snapshot {}@{}'.format(pool_path, version))

    @classmethod
    def clone(cls, snapshot, destination):
        """Clone `snapshot` to `destination`."""
        return cls.run('zfs clone {} {}'.format(snapshot, destination))
28.633333
99
0.597206
100
859
5.03
0.28
0.095427
0.119284
0.149105
0.365805
0.286282
0.228628
0.147117
0
0
0
0
0.235157
859
29
100
29.62069
0.765601
0
0
0.454545
0
0
0.084983
0
0
0
0
0
0
1
0.227273
false
0
0.045455
0
0.545455
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
5f9275af7f2403af1a15ebcaf52d1f54756b42d9
320
py
Python
Aula6/FindSqrt.py
Anderson842/LabADAGrupoC
f0cb79968b994eeafe562046f4bd87c0d4c07fb6
[ "BSD-3-Clause" ]
null
null
null
Aula6/FindSqrt.py
Anderson842/LabADAGrupoC
f0cb79968b994eeafe562046f4bd87c0d4c07fb6
[ "BSD-3-Clause" ]
null
null
null
Aula6/FindSqrt.py
Anderson842/LabADAGrupoC
f0cb79968b994eeafe562046f4bd87c0d4c07fb6
[ "BSD-3-Clause" ]
null
null
null
# Check whether a number is a perfect square
# without using math.sqrt (binary search).
def findSqrt(num):
    """Return True if `num` is a perfect square, else False.

    Binary-searches the candidate root over [0, num]. Negative inputs
    return False (the loop never runs).

    Fix: the original searched [0, num-1], which wrongly reported 0 and 1
    as non-squares; the upper bound now includes `num` itself.
    """
    lo = 0
    hi = num
    while lo <= hi:
        mid = lo + (hi - lo) // 2
        sq = mid * mid
        if sq == num:
            return True
        if sq < num:
            lo = mid + 1
        else:
            hi = mid - 1
    return False


print(findSqrt(14))
18.823529
35
0.5
51
320
3.137255
0.509804
0.09375
0.15
0.20625
0
0
0
0
0
0
0
0.035714
0.3875
320
17
36
18.823529
0.780612
0.16875
0
0
0
0
0
0
0
0
0
0.058824
0
1
0.076923
false
0
0
0
0.230769
0.076923
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
2
5f9c5332af1752a1320d26b14ef781fe8c9fc2e5
724
py
Python
YandexPracticum/contest/sprint11/function_value.py
EkaterinaFedorova/prepearing
ccc4fb388c918d942c514a7e0375558c72a72df5
[ "MIT" ]
null
null
null
YandexPracticum/contest/sprint11/function_value.py
EkaterinaFedorova/prepearing
ccc4fb388c918d942c514a7e0375558c72a72df5
[ "MIT" ]
null
null
null
YandexPracticum/contest/sprint11/function_value.py
EkaterinaFedorova/prepearing
ccc4fb388c918d942c514a7e0375558c72a72df5
[ "MIT" ]
null
null
null
# Task: evaluate the quadratic y = a*x^2 + b*x + c at a point x.
# The four integers a, x, b, c are read from standard input
# (sample input: -8 -5 -2 7).


def get_func_result(a, x, b, c):
    """Return the value of a*x**2 + b*x + c."""
    return a * x * x + b * x + c


def read_input():
    """Read one whitespace-separated line of integers from stdin."""
    return [int(token) for token in input().split()]


if __name__ == "__main__":
    a, x, b, c = read_input()
    result = get_func_result(a, x, b, c)
    print(result)
32.909091
107
0.697514
119
724
4.12605
0.663866
0.016293
0.01833
0.02444
0.069246
0.069246
0.069246
0
0
0
0
0.010399
0.203039
724
21
108
34.47619
0.840555
0.627072
0
0
0
0
0.030534
0
0
0
0
0
0
1
0.25
false
0
0
0.25
0.5
0.125
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
5fad83dc6bdeba4f03736ee91e479efc987f12c9
258
py
Python
chaewon/week15/Z.py
sio2whocodes/kings-algorithm-study
ef0a0def1bb1a3c09c4a44b93061f72a196c88fe
[ "MIT" ]
null
null
null
chaewon/week15/Z.py
sio2whocodes/kings-algorithm-study
ef0a0def1bb1a3c09c4a44b93061f72a196c88fe
[ "MIT" ]
null
null
null
chaewon/week15/Z.py
sio2whocodes/kings-algorithm-study
ef0a0def1bb1a3c09c4a44b93061f72a196c88fe
[ "MIT" ]
null
null
null
import sys


def solution(n, r, c):
    """Index of cell (r, c) when a 2^n x 2^n grid is visited in Z-order.

    Iterative form: each bit level contributes 2*(row bit) + (col bit),
    weighted by 4^level.
    """
    index = 0
    for level in range(n):
        index += (2 * (r % 2) + (c % 2)) * 4 ** level
        r //= 2
        c //= 2
    return index


if __name__ == "__main__":
    n, r, c = map(int, sys.stdin.readline().split())
    print(solution(n, r, c))
19.846154
78
0.503876
45
258
2.711111
0.466667
0.221311
0.07377
0.180328
0
0
0
0
0
0
0
0.04918
0.290698
258
12
79
21.5
0.617486
0
0
0
0
0
0.031008
0
0
0
0
0
0
1
0.125
false
0
0.125
0
0.5
0.125
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
5fbe5544a5869f3c50d6b13727ec784ae324c003
275
py
Python
python_data_analysis/pandas/demo1.py
MiracleWong/MoocStudy
e22c6e69b77b98b6d71b52d90321aa442d726ffa
[ "MIT" ]
null
null
null
python_data_analysis/pandas/demo1.py
MiracleWong/MoocStudy
e22c6e69b77b98b6d71b52d90321aa442d726ffa
[ "MIT" ]
null
null
null
python_data_analysis/pandas/demo1.py
MiracleWong/MoocStudy
e22c6e69b77b98b6d71b52d90321aa442d726ffa
[ "MIT" ]
null
null
null
#!/usr/bin/python
#-*- coding:utf8 -*-
# pandas.Series demo: construction from list / scalar / dict, and
# index-aligned arithmetic.
#
# Fix: the original used Python-2-only `print x` statements; `print(x)` with a
# single argument behaves identically on Python 2 and also runs on Python 3.
import pandas as pd

# From a list with an explicit index.
b = pd.Series([9, 8, 7, 6], index=['a', 'b', 'c', 'd'])
# A scalar is broadcast across the whole index.
s = pd.Series(25, index=['a', 'b', 'c'])
# From a dict: keys become the index.
d = pd.Series({'a': 9, 'b': 8, 'c': 7})
# Explicit index reorders/selects keys; missing key 'd' becomes NaN.
e = pd.Series({'a': 9, 'b': 8, 'c': 7}, index=['c', 'a', 'b', 'd'])

print(b)

# Addition aligns on the union of both indexes.
a = s + d
print(a)
22.916667
63
0.483636
59
275
2.254237
0.389831
0.240602
0.105263
0.120301
0.345865
0.210526
0.210526
0.210526
0
0
0
0.054852
0.138182
275
12
64
22.916667
0.506329
0.127273
0
0
0
0
0.07113
0
0
0
0
0
0
0
null
null
0
0.125
null
null
0.25
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
5fc327c65eabb2a591b39ca253820d046ef827b1
2,622
py
Python
forms.py
fyabc/DBLab02
8ea967337107e29fcd0e171eb418a40e8abe7d9e
[ "MIT" ]
1
2016-06-03T09:37:04.000Z
2016-06-03T09:37:04.000Z
forms.py
fyabc/DBLab02
8ea967337107e29fcd0e171eb418a40e8abe7d9e
[ "MIT" ]
null
null
null
forms.py
fyabc/DBLab02
8ea967337107e29fcd0e171eb418a40e8abe7d9e
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-
# WTForms form definitions for the DB lab web app.

__author__ = 'fyabc'

from flask.ext.wtf import Form
from wtforms import StringField, SubmitField, SelectField, PasswordField, IntegerField
from wtforms.validators import DataRequired, NumberRange, Length

# Local modules.
from config import TableNames


# Registration form: user id, user name and a password of at least 6 characters.
class SignInForm(Form):
    userID = StringField('用户ID', validators=[DataRequired()])
    userName = StringField('用户名', validators=[DataRequired()])
    password = PasswordField(
        '密码',
        validators=[DataRequired(), Length(min=6, message='密码长度不得少于6个字符。')])
    submit = SubmitField('注册')


# Generic lookup form: choose a table and a primary-key name to query.
class QueryForm(Form):
    type = SelectField('查询类型', coerce=str, choices=TableNames)
    queryName = StringField('查询主键名称', default='')
    submit = SubmitField('查询')


# Login form.
class LoginForm(Form):
    userName = StringField('账号', validators=[DataRequired()])
    password = PasswordField('密码', validators=[DataRequired()])
    submit = SubmitField('登录')

    # NOTE(review): hard-coded demo credentials; assumed to be class-level
    # constants the login view checks against — confirm with the caller.
    myUserName = 'fyabc'
    myPassword = 'fy95102'


# Reservation form: customer id, reservation kind (flight/hotel/taxi) and key.
class ReserveForm(Form):
    customerID = StringField('用户编号', validators=[DataRequired()])
    reserveType = SelectField('预订类型', coerce=int, choices=[
        (1, '航班'), (2, '宾馆'), (3, '出租车')
    ])
    reserveKey = StringField('预订名称', validators=[DataRequired()])
    submit = SubmitField('预订')


# Cancel an existing reservation by its integer id.
class UnsubscribeForm(Form):
    reservationID = IntegerField('预订编号', validators=[DataRequired()])
    submit = SubmitField('退订')


# Insert-record form; the type choices exclude the Reservations table
# (assumes TableNames entries are (value, label) pairs — verify).
class InsertForm(Form):
    type = SelectField('插入类型', coerce=str,
                       choices=[name for name in TableNames if name[0] != 'Reservations'])
    primaryKey = StringField('主键名称', validators=[DataRequired()])
    price = IntegerField('价格', validators=[NumberRange(min=1, max=524287)])
    numTotal = IntegerField('数量', validators=[NumberRange(min=1, max=1023)])
    password = StringField('密码')
    fromCity = StringField('出发城市')
    toCity = StringField('目的城市')
    customerName = StringField('用户名称')
    submit = SubmitField('插入记录')


# Delete-record form over any table.
class DeleteForm(Form):
    type = SelectField('删除类型', coerce=str, choices=[name for name in TableNames])
    primaryKey = StringField('主键名称', validators=[DataRequired()])
    submit = SubmitField('删除记录')


# Route query: departure city / destination city.
class RouteQueryForm(Form):
    fromCity = StringField('出发城市', validators=[DataRequired()])
    toCity = StringField('目的城市', validators=[DataRequired()])
    submit = SubmitField('查询线路')


# Customer query by id and/or name (both optional).
class CustomerQueryForm(Form):
    IDNumber = StringField('用户ID')
    customerName = StringField('用户名称')
    submit = SubmitField('查询用户')
32.37037
110
0.650648
240
2,622
7.091667
0.441667
0.155112
0.082256
0.114571
0.264395
0.124559
0.124559
0.045828
0
0
0
0.011538
0.206712
2,622
80
111
32.775
0.806731
0.01373
0
0.071429
0
0
0.063492
0
0
0
0
0
0
1
0
false
0.089286
0.071429
0
0.875
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
2
5fcacb311170f64cd246a960dc00ca8b56afbb7a
596
py
Python
pom/base.py
libingluan/project
7095395486dd2b8f98eb6d1d6d2330ff24c94866
[ "MIT" ]
null
null
null
pom/base.py
libingluan/project
7095395486dd2b8f98eb6d1d6d2330ff24c94866
[ "MIT" ]
null
null
null
pom/base.py
libingluan/project
7095395486dd2b8f98eb6d1d6d2330ff24c94866
[ "MIT" ]
null
null
null
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC


class Browser_init():
    """Base page object: holds the shared WebDriver and common waits.

    Both the PO layer and this base layer serve the test cases.
    """

    def __init__(self, driver):
        # Inheritance note: a subclass without its own __init__ automatically
        # uses this constructor when instantiated.
        self.driver = driver

    def Wait_ele(self, locator, time=10):
        """Wait up to `time` seconds for the element described by `locator`.

        :param locator: a (By, value) locator tuple
        :param time: timeout in seconds (default 10)
        :return: the located WebElement
        :raises TimeoutException: if the element does not appear in time
        """
        wait = WebDriverWait(self.driver, time)
        # Fix: the expected_conditions helper is presence_of_element_located;
        # the original called the non-existent presence_of_element_locator,
        # which would raise AttributeError at runtime.
        ele = wait.until(EC.presence_of_element_located(locator), message=" 没有该元素")
        return ele
24.833333
86
0.669463
67
596
5.80597
0.656716
0.077121
0.107969
0.143959
0
0
0
0
0
0
0
0.008869
0.243289
596
23
87
25.913043
0.853659
0.204698
0
0
0
0
0.012931
0
0
0
0
0
0
1
0.222222
false
0
0.222222
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
397e43810c2d5f1f57b38a7acbfd697a808c9edf
188
py
Python
Mundo1/Desafio32.py
David-M-Dias/Exercicios-do-curso-de-Python
14f818243f231738e9d47d452f778fb3bace93c0
[ "MIT" ]
1
2021-01-20T13:32:58.000Z
2021-01-20T13:32:58.000Z
Mundo1/Desafio32.py
David-M-Dias/Exercicios-do-curso-de-Python
14f818243f231738e9d47d452f778fb3bace93c0
[ "MIT" ]
null
null
null
Mundo1/Desafio32.py
David-M-Dias/Exercicios-do-curso-de-Python
14f818243f231738e9d47d452f778fb3bace93c0
[ "MIT" ]
null
null
null
# Read a year from the user and report whether it is a leap year
# (Gregorian rule: divisible by 4 and not by 100, or divisible by 400).
ano = int(input('Digite um ano: '))
bissexto = ano % 400 == 0 or (ano % 4 == 0 and ano % 100 != 0)
if bissexto:
    print('O ano de {} é Bissexto!'.format(ano))
else:
    print('O ano de {} NÃO é bissexto'.format(ano))
37.6
51
0.601064
36
188
3.138889
0.555556
0.106195
0.159292
0.19469
0
0
0
0
0
0
0
0.064516
0.175532
188
5
51
37.6
0.664516
0
0
0
0
0
0.338624
0
0
0
0
0
0
1
0
false
0
0
0
0
0.4
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
397e9d7b8a5198ca865ec2302382c401de3ee8ab
611
py
Python
CGI/simple-server-with-different-languages/cgi-bin/download.py
whitmans-max/python-examples
881a8f23f0eebc76816a0078e19951893f0daaaa
[ "MIT" ]
140
2017-02-21T22:49:04.000Z
2022-03-22T17:51:58.000Z
CGI/simple-server-with-different-languages/cgi-bin/download.py
whitmans-max/python-examples
881a8f23f0eebc76816a0078e19951893f0daaaa
[ "MIT" ]
5
2017-12-02T19:55:00.000Z
2021-09-22T23:18:39.000Z
CGI/simple-server-with-different-languages/cgi-bin/download.py
whitmans-max/python-examples
881a8f23f0eebc76816a0078e19951893f0daaaa
[ "MIT" ]
79
2017-01-25T10:53:33.000Z
2022-03-11T16:13:57.000Z
#!/usr/bin/env python import os import sys fullpath = 'images/normal.png' filename = 'hello_world.png' # headers print 'Content-Type: application/octet-stream; name="%s"' % filename print 'Content-Disposition: attachment; filename="%s"' % filename print "Content-Length: " + str(os.stat(fullpath).st_size) print # empty line between headers and body #sys.stdout.flush() # send header faster try: # body with open(fullpath, 'rb') as fo: print fo.read() except Exception as e: print 'Content-type:text/html' print # empty line between headers and body print 'Exception:', e
25.458333
68
0.693944
85
611
4.964706
0.611765
0.113744
0.075829
0.099526
0.165877
0.165877
0.165877
0
0
0
0
0
0.180033
611
23
69
26.565217
0.842315
0.234043
0
0.133333
0
0
0.383948
0.101952
0
0
0
0
0
0
null
null
0
0.133333
null
null
0.533333
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
2
3980bcba3c0b08f4d07a8a4f6bc8131263886629
2,069
py
Python
website/views/register.py
Aurelian-Shuttleworth/snitch
f5ee6c02bb5597b278b488436df9713b2f3da4e1
[ "MIT" ]
null
null
null
website/views/register.py
Aurelian-Shuttleworth/snitch
f5ee6c02bb5597b278b488436df9713b2f3da4e1
[ "MIT" ]
2
2021-06-28T13:16:00.000Z
2021-06-28T17:30:59.000Z
website/views/register.py
Aurelian-Shuttleworth/snitch
f5ee6c02bb5597b278b488436df9713b2f3da4e1
[ "MIT" ]
1
2021-06-28T13:17:53.000Z
2021-06-28T13:17:53.000Z
from flask import make_response, request, render_template, redirect
from flask.helpers import url_for
from models.session import Session
from models.user import User
from werkzeug.security import generate_password_hash


class RegisterView:
    """Handles GET (render form) and POST (create account) for registration."""

    RESULT_SUCCESS = "success"
    RESULT_USEREXISTS = "userexists"
    RESULT_CREATEFAILED = "createfailed"

    def __init__(self):
        # Resume (or start) the session identified by the request cookie.
        self.session = Session(request.cookies.get("session_id"))
        self.view_model = {
            "logged_in": self.session.logged_in,
            "username": self.session.username,
            "result": self.result_string(),
        }

    def render(self):
        """Dispatch on the HTTP verb; other methods yield None."""
        handlers = {"GET": self.get, "POST": self.post}
        handler = handlers.get(request.method)
        if handler is not None:
            return handler()

    def get(self):
        """Render the registration page and refresh the session cookie."""
        page = render_template("register.html", vm=self.view_model)
        response = make_response(page)
        response.set_cookie("session_id", self.session.session_id, httponly=True, secure=True)
        return response

    def post(self):
        """Create the account and redirect back with a result code."""
        redirect_url = url_for(".register") + "?result={result}"
        outcome = self.RESULT_SUCCESS
        # TODO: Validate username requirements
        user = User(request.form["username"])
        if user.user_exists():
            return redirect(redirect_url.format(result=self.RESULT_USEREXISTS))
        # TODO: Validate password requirements
        user.password = generate_password_hash(request.form["password"], salt_length=16)
        if not user.create_new_user():
            outcome = self.RESULT_CREATEFAILED
        return redirect(redirect_url.format(result=outcome))

    def result_string(self):
        """Map the ?result= query code to a display string (None if absent)."""
        code = request.args.get("result")
        if code is None:
            return None
        messages = {
            self.RESULT_SUCCESS: "registration_successful",
            self.RESULT_USEREXISTS: "Username already exists.",
            self.RESULT_CREATEFAILED: "Failed to create user.",
        }
        return messages.get(code, "Unknown error.")
33.370968
94
0.649589
232
2,069
5.616379
0.310345
0.061397
0.085955
0.041443
0.158097
0.066002
0.066002
0
0
0
0
0.001296
0.254229
2,069
61
95
33.918033
0.843163
0.035283
0
0.042553
0
0
0.11139
0.01154
0
0
0
0.016393
0
1
0.106383
false
0.042553
0.106383
0
0.510638
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
2
39881124fe6ddbcf8ecc3857a33439a9fbbb0e8b
1,935
py
Python
res/005-strings.py
leialbert/keep-learning-python
7bbf2226e6e99e87661f15ea46e6149b61d9912f
[ "MIT" ]
null
null
null
res/005-strings.py
leialbert/keep-learning-python
7bbf2226e6e99e87661f15ea46e6149b61d9912f
[ "MIT" ]
null
null
null
res/005-strings.py
leialbert/keep-learning-python
7bbf2226e6e99e87661f15ea46e6149b61d9912f
[ "MIT" ]
null
null
null
# Walkthrough of basic Python string operations.

my_string = 'Hello world'
print(my_string)

# Triple-quoted string; the trailing backslash elides the line break.
my_string = """my sting\
hello world """
print(my_string)

# Indexing: negative indexes count from the end. Strings are immutable,
# so item assignment (commented out below) would raise TypeError.
my_string = 'hello world'
char = my_string[-2]
print(char)
# my_string[0] = 'ZZ'

# Slicing: [start:stop], every 2nd character, and reversal with step -1.
substring = my_string[1:5]
print(substring)
substring = my_string[::2]
print(substring)
substring = my_string[::-1]
print(substring)

# Concatenation, character iteration, and membership testing.
greeting = 'Hello'
name = 'albert'
sentence = greeting + name
print(sentence)

for i in sentence:
    print(i)

if 'e' in sentence:
    print('yes')
else:
    print('no')

# strip() returns a new string; the original is unchanged.
my_string = ' hello world '
print(my_string)
my_string_new = my_string.strip()
print(my_string)
print(my_string_new)

# Common query/transform methods.
my_string = 'hello world'
print(my_string.upper())
print(my_string.startswith('H'))
print(my_string.endswith('d'))
print(my_string.find('o'))
print(my_string.count('o'))
print(my_string.replace('world','world!'))

# lists and strings
my_string = 'how you are doing'
my_list = my_string.split()
print(my_list)

my_string = 'how,you,are,doing'
my_list = my_string.split(',')
print(my_list)

new_string = ''.join(my_list)
print(new_string)
new_string = ' '.join(my_list)
print(new_string)

# Timing comparison: += in a loop vs a single ''.join (join wins).
from timeit import default_timer as timer

my_list = ['a'] * 10000
# print(my_list)

my_string = ''
start = timer()
for i in my_list:
    my_string += i
stop = timer()
print(stop-start)

start = timer()
my_string2 =''.join(my_list)
stop = timer()
print(stop-start)

# % .format, f-string
var = 'Tom'
my_string = 'The variable is %s' %var
print(my_string)

var = 3
my_string = 'The variable is %d' %var
print(my_string)

# %d truncates the float to an integer.
var = 3.4
my_string = 'The variable is %d' %var
print(my_string)

var = 3.4
my_string = 'The variable is %.2f' %var
print(my_string)

var = 3.4
my_string = 'The variable is {:.2f}'.format(var)
print(my_string)

var = 3.4
var2 =6
my_string = 'The variable is {:.2f} and {}'.format(var,var2)
print(my_string)

# f-strings evaluate expressions inline.
var = 3.422334
var2 =6
my_string = f'The variable is {var*2:.2f} and {var2}'
print(my_string)
17.276786
60
0.690956
321
1,935
3.975078
0.221184
0.269592
0.183386
0.089342
0.559561
0.413009
0.37931
0.314263
0.262539
0.201411
0
0.023752
0.151421
1,935
111
61
17.432432
0.75335
0.037209
0
0.45122
0
0
0.170705
0
0
0
0
0
0
1
0
false
0
0.012195
0
0.012195
0.390244
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
39a3eb03b893e74fa25a6f5f4b9673eff2eb45bd
453
py
Python
api/schemas/couriers_ids.py
syth0le/REST-API_YANDEX
7a693430973e4d0ae428860d17fc33504dc25fb2
[ "MIT" ]
null
null
null
api/schemas/couriers_ids.py
syth0le/REST-API_YANDEX
7a693430973e4d0ae428860d17fc33504dc25fb2
[ "MIT" ]
null
null
null
api/schemas/couriers_ids.py
syth0le/REST-API_YANDEX
7a693430973e4d0ae428860d17fc33504dc25fb2
[ "MIT" ]
null
null
null
from marshmallow_sqlalchemy import ModelSchema
from api.models.couriers import *
from marshmallow import fields


class CouriersIds(ModelSchema):
    """Schema exposing only the `id` field of a Couriers row."""

    class Meta(ModelSchema.Meta):
        model = Couriers
        fields = ['id']          # serialize nothing but the primary key
        sqla_session = db.session

    id = fields.Integer(required=True)

    # NOTE(review): disabled post-dump hook kept for reference; it renamed
    # 'order_id' to 'id' in the serialized payload.
    # @post_dump
    # def change(self, data, **kwargs):
    #     data['id'] = data['order_id']
    #     del data['order_id']
    #     return data
25.166667
46
0.644592
52
453
5.519231
0.576923
0.10453
0.076655
0
0
0
0
0
0
0
0
0
0.247241
453
18
47
25.166667
0.841642
0.262693
0
0
0
0
0.006079
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
39abd5ff966a14b37b8d33f1732fc36cb9478975
1,787
py
Python
password_gen.py
Tugra-Guldogan/password-generator
308226ffdc4532144578755ab3165a96e4d83649
[ "MIT" ]
1
2021-07-26T18:47:01.000Z
2021-07-26T18:47:01.000Z
password_gen.py
Tugra-Guldogan/password-generator
308226ffdc4532144578755ab3165a96e4d83649
[ "MIT" ]
null
null
null
password_gen.py
Tugra-Guldogan/password-generator
308226ffdc4532144578755ab3165a96e4d83649
[ "MIT" ]
null
null
null
import random numbers = ['1','2','3','4','5','6','7','8''9','0'] letters = ["A","B","C","D","E","F","G","H","I","J","K","L","M","N","I","J","K","L","M","N","O","P","Q","R","S","T","U","V","W","X","Y","Z"] small = ["a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s","t","u","v","w","x","z"] characters = ["#","!","@","$","%","^","&","*","(",")","-","_","/",":",":","{","}","[","]","+","="] l = random.choice(small) x = random.choice(letters) y = random.choice(numbers) z = random.choice(characters) sq = 0 num = 0 q = random.choice(small) w = random.choice(letters) e = random.choice(numbers) r = random.choice(characters) t = random.choice(small) u = random.choice(letters) i = random.choice(numbers) o = random.choice(characters) p = random.choice(small) g = random.choice(letters) h = random.choice(numbers) j = random.choice(characters) m = (input('Press enter to continue :')) num= random.randint(0,99) if num<100: if num%2==0: sp= random.choice(numbers) jd = random.choice(characters) print("Your password : ",l+x+y+z+q+w+e+r+t+u+i+o+p+g+h+j+sp+jd) else: print("Your password : ","h",l+x+y+z+q+w+e+r+t+u+i+o+p+g+h+j+l+q+w+r+t+u+y) elif num>100: if num % 2 == 0: io= random.choice(characters) pk = random.choice(characters) print("Your password : ", l + x + y + z + q + w + e + u + i + o + p + g + h + j + pk + io) else: gh= random.choice(numbers) ab = random.choice(letters) print("Your password : ", l + x + y + z + q + w + e + u + i + o + p + g + h + j + gh+ ab) else: uh = random.choice(numbers) ib = random.choice(letters) print("Your password : ", l + x + y + z + q + w + e + u + i + o + p + g + h + j + uh + ib + t + h + q + l + j)
30.810345
139
0.50028
311
1,787
2.871383
0.215434
0.322508
0.148936
0.022396
0.340426
0.340426
0.31131
0.31131
0.31131
0.31131
0
0.017446
0.198097
1,787
57
140
31.350877
0.605722
0
0
0.111111
0
0
0.108562
0
0
0
0
0
0
1
0
false
0.111111
0.022222
0
0.022222
0.111111
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
39b28ba5805fe3ef37c36928db95b96b83ae5b20
12,719
py
Python
pysnmp-with-texts/AT-DHCPSN-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
8
2019-05-09T17:04:00.000Z
2021-06-09T06:50:51.000Z
pysnmp-with-texts/AT-DHCPSN-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
4
2019-05-31T16:42:59.000Z
2020-01-31T21:57:17.000Z
pysnmp-with-texts/AT-DHCPSN-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
10
2019-04-30T05:51:36.000Z
2022-02-16T03:33:41.000Z
# # PySNMP MIB module AT-DHCPSN-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/AT-DHCPSN-MIB # Produced by pysmi-0.3.4 at Wed May 1 11:29:42 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") SingleValueConstraint, ConstraintsIntersection, ValueSizeConstraint, ValueRangeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsIntersection", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsUnion") modules, = mibBuilder.importSymbols("AT-SMI-MIB", "modules") NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance") TimeTicks, Integer32, MibIdentifier, IpAddress, MibScalar, MibTable, MibTableRow, MibTableColumn, iso, Counter32, ModuleIdentity, Counter64, Unsigned32, ObjectIdentity, Bits, Gauge32, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "TimeTicks", "Integer32", "MibIdentifier", "IpAddress", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "iso", "Counter32", "ModuleIdentity", "Counter64", "Unsigned32", "ObjectIdentity", "Bits", "Gauge32", "NotificationType") TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString") atDhcpsn = ModuleIdentity((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 537)) atDhcpsn.setRevisions(('2010-09-07 00:00', '2010-06-14 04:45', '2010-02-09 01:30', '2009-12-10 01:30',)) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): if mibBuilder.loadTexts: atDhcpsn.setRevisionsDescriptions(('Generic syntax tidy up', 'MIB revision history dates in descriptions updated.', 'This MIB file contains definitions of managed objects 
for DHCP Snooping in AlliedWare Plus.', 'Initial Revision',)) if mibBuilder.loadTexts: atDhcpsn.setLastUpdated('201009070000Z') if mibBuilder.loadTexts: atDhcpsn.setOrganization('Allied Telesis, Inc') if mibBuilder.loadTexts: atDhcpsn.setContactInfo('http://www.alliedtelesis.com') if mibBuilder.loadTexts: atDhcpsn.setDescription('Added two more violation types for DHCP Snooping.') atDhcpsnEvents = MibIdentifier((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 537, 0)) atDhcpsnTrap = NotificationType((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 537, 0, 1)).setObjects(("AT-DHCPSN-MIB", "atDhcpsnIfIndex"), ("AT-DHCPSN-MIB", "atDhcpsnVid"), ("AT-DHCPSN-MIB", "atDhcpsnSmac"), ("AT-DHCPSN-MIB", "atDhcpsnOpcode"), ("AT-DHCPSN-MIB", "atDhcpsnCiaddr"), ("AT-DHCPSN-MIB", "atDhcpsnYiaddr"), ("AT-DHCPSN-MIB", "atDhcpsnGiaddr"), ("AT-DHCPSN-MIB", "atDhcpsnSiaddr"), ("AT-DHCPSN-MIB", "atDhcpsnChaddr"), ("AT-DHCPSN-MIB", "atDhcpsnVioType")) if mibBuilder.loadTexts: atDhcpsnTrap.setStatus('current') if mibBuilder.loadTexts: atDhcpsnTrap.setDescription('DHCP Snooping violation trap.') atArpsecTrap = NotificationType((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 537, 0, 2)).setObjects(("AT-DHCPSN-MIB", "atArpsecIfIndex"), ("AT-DHCPSN-MIB", "atArpsecClientIP"), ("AT-DHCPSN-MIB", "atArpsecSrcMac"), ("AT-DHCPSN-MIB", "atArpsecVid"), ("AT-DHCPSN-MIB", "atArpsecVioType")) if mibBuilder.loadTexts: atArpsecTrap.setStatus('current') if mibBuilder.loadTexts: atArpsecTrap.setDescription('DHCP Snooping ARP Security violation trap.') atDhcpsnVariablesTable = MibTable((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 537, 1), ) if mibBuilder.loadTexts: atDhcpsnVariablesTable.setStatus('current') if mibBuilder.loadTexts: atDhcpsnVariablesTable.setDescription('This table contains rows of DHCP Snooping information.') atDhcpsnVariablesEntry = MibTableRow((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 537, 1, 1), ).setIndexNames((0, "AT-DHCPSN-MIB", "atDhcpsnIfIndex")) if mibBuilder.loadTexts: atDhcpsnVariablesEntry.setStatus('current') if 
mibBuilder.loadTexts: atDhcpsnVariablesEntry.setDescription('A set of parameters that describe the DHCP Snooping features.') atDhcpsnIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 537, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly") if mibBuilder.loadTexts: atDhcpsnIfIndex.setStatus('current') if mibBuilder.loadTexts: atDhcpsnIfIndex.setDescription('Ifindex of the port that the packet was received on.') atDhcpsnVid = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 537, 1, 1, 2), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: atDhcpsnVid.setStatus('current') if mibBuilder.loadTexts: atDhcpsnVid.setDescription('VLAN ID of the port that the packet was received on.') atDhcpsnSmac = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 537, 1, 1, 3), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: atDhcpsnSmac.setStatus('current') if mibBuilder.loadTexts: atDhcpsnSmac.setDescription('Source MAC address of the packet that caused the trap.') atDhcpsnOpcode = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 537, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("bootpRequest", 1), ("bootpReply", 2)))).setMaxAccess("readonly") if mibBuilder.loadTexts: atDhcpsnOpcode.setStatus('current') if mibBuilder.loadTexts: atDhcpsnOpcode.setDescription('Opcode value of the BOOTP packet that caused the trap. 
Only bootpRequest(1) or bootpReply(2) is valid.') atDhcpsnCiaddr = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 537, 1, 1, 5), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: atDhcpsnCiaddr.setStatus('current') if mibBuilder.loadTexts: atDhcpsnCiaddr.setDescription('Ciaddr value of the BOOTP packet that caused the trap.') atDhcpsnYiaddr = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 537, 1, 1, 6), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: atDhcpsnYiaddr.setStatus('current') if mibBuilder.loadTexts: atDhcpsnYiaddr.setDescription('Yiaddr value of the BOOTP packet that caused the trap.') atDhcpsnGiaddr = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 537, 1, 1, 7), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: atDhcpsnGiaddr.setStatus('current') if mibBuilder.loadTexts: atDhcpsnGiaddr.setDescription('Giaddr value of the BOOTP packet that caused the trap.') atDhcpsnSiaddr = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 537, 1, 1, 8), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: atDhcpsnSiaddr.setStatus('current') if mibBuilder.loadTexts: atDhcpsnSiaddr.setDescription('Siaddr value of the BOOTP packet that caused the trap.') atDhcpsnChaddr = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 537, 1, 1, 9), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: atDhcpsnChaddr.setStatus('current') if mibBuilder.loadTexts: atDhcpsnChaddr.setDescription('Chaddr value of the BOOTP packet that caused the trap.') atDhcpsnVioType = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 537, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13))).clone(namedValues=NamedValues(("invalidBootp", 1), ("invalidDhcpAck", 2), ("invalidDhcpRelDec", 3), ("invalidIp", 4), ("maxBindExceeded", 5), ("opt82InsertErr", 6), ("opt82RxInvalid", 7), ("opt82RxUntrusted", 8), ("opt82TxUntrusted", 9), ("replyRxUntrusted", 10), 
("srcMacChaddrMismatch", 11), ("staticEntryExisted", 12), ("dbAddErr", 13)))).setMaxAccess("readonly") if mibBuilder.loadTexts: atDhcpsnVioType.setStatus('current') if mibBuilder.loadTexts: atDhcpsnVioType.setDescription('The reason that the trap was generated. invalidBootp(1) indicates that the received BOOTP packet was invalid. For example, it is neither BootpRequest nor BootpReply. invalidDhcpAck(2) indicates that the received DHCP ACK was invalid. invalidDhcpRelDec(3) indicates the DHCP Release or Decline was invalid. invalidIp(4) indicates that the received IP packet was invalid. maxBindExceeded(5) indicates that if the entry was added, the maximum bindings configured for the port would be exceeded. opt82InsertErr(6) indicates that the insertion of Option 82 failed. opt82RxInvalid(7) indicates that the received Option 82 information was invalid. opt82RxUntrusted(8) indicates that Option 82 information was received on an untrusted port. opt82TxUntrusted(9) indicates that Option 82 would have been transmitted out an untrusted port. replyRxUntrusted(10) indicates that a BOOTP Reply was received on an untrusted port. srcMacChaddrMismatch(11) indicates that the source MAC address of the packet did not match the BOOTP CHADDR of the packet. staticEntryExisted(12) indicates that the static entry to be added already exists. 
dbAddErr(13) indicates that adding an entry to the database failed.') atArpsecVariablesTable = MibTable((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 537, 2), ) if mibBuilder.loadTexts: atArpsecVariablesTable.setStatus('current') if mibBuilder.loadTexts: atArpsecVariablesTable.setDescription('This table contains rows of DHCP Snooping ARP Security information.') atArpsecVariablesEntry = MibTableRow((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 537, 2, 1), ).setIndexNames((0, "AT-DHCPSN-MIB", "atArpsecIfIndex")) if mibBuilder.loadTexts: atArpsecVariablesEntry.setStatus('current') if mibBuilder.loadTexts: atArpsecVariablesEntry.setDescription('A set of parameters that describe the DHCP Snooping ARP Security features.') atArpsecIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 537, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly") if mibBuilder.loadTexts: atArpsecIfIndex.setStatus('current') if mibBuilder.loadTexts: atArpsecIfIndex.setDescription('Ifindex of the port that the ARP packet was received on.') atArpsecClientIP = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 537, 2, 1, 2), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: atArpsecClientIP.setStatus('current') if mibBuilder.loadTexts: atArpsecClientIP.setDescription('Source IP address of the ARP packet.') atArpsecSrcMac = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 537, 2, 1, 3), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: atArpsecSrcMac.setStatus('current') if mibBuilder.loadTexts: atArpsecSrcMac.setDescription('Source MAC address of the ARP packet.') atArpsecVid = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 537, 2, 1, 4), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: atArpsecVid.setStatus('current') if mibBuilder.loadTexts: atArpsecVid.setDescription('VLAN ID of the port that the ARP packet was received on.') atArpsecVioType = MibTableColumn((1, 3, 6, 1, 4, 1, 207, 8, 4, 4, 4, 537, 2, 1, 5), 
Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("srcIpNotFound", 1), ("badVLAN", 2), ("badPort", 3), ("srcIpNotAllocated", 4)))).setMaxAccess("readonly") if mibBuilder.loadTexts: atArpsecVioType.setStatus('current') if mibBuilder.loadTexts: atArpsecVioType.setDescription('The reason that the trap was generated. srcIpNotFound(1) indicates that the Sender IP address of the ARP packet was not found in the DHCP Snooping database. badVLAN(2) indicates that the VLAN of the DHCP Snooping binding entry associated with the Sender IP address of the ARP packet does not match the VLAN that the ARP packet was received on. badPort(3) indicates that the port of the DHCP Snooping binding entry associated with the Sender IP address of the ARP packet does not match the port that the ARP packet was received on. srcIpNotAllocated(4) indicates that the CHADDR of the DHCP Snooping binding entry associated with the Sender IP address of the ARP packet does not match the Source MAC and/or the ARP source MAC of the ARP packet.') mibBuilder.exportSymbols("AT-DHCPSN-MIB", atArpsecClientIP=atArpsecClientIP, atDhcpsnEvents=atDhcpsnEvents, atArpsecSrcMac=atArpsecSrcMac, atDhcpsnVid=atDhcpsnVid, atDhcpsnCiaddr=atDhcpsnCiaddr, atDhcpsnSmac=atDhcpsnSmac, atArpsecTrap=atArpsecTrap, atArpsecVariablesTable=atArpsecVariablesTable, atArpsecVioType=atArpsecVioType, atDhcpsnOpcode=atDhcpsnOpcode, atDhcpsnChaddr=atDhcpsnChaddr, atDhcpsnVariablesEntry=atDhcpsnVariablesEntry, atDhcpsnGiaddr=atDhcpsnGiaddr, atArpsecVariablesEntry=atArpsecVariablesEntry, atDhcpsnSiaddr=atDhcpsnSiaddr, atDhcpsnYiaddr=atDhcpsnYiaddr, atDhcpsnVioType=atDhcpsnVioType, atDhcpsnIfIndex=atDhcpsnIfIndex, PYSNMP_MODULE_ID=atDhcpsn, atDhcpsnVariablesTable=atDhcpsnVariablesTable, atDhcpsn=atDhcpsn, atDhcpsnTrap=atDhcpsnTrap, atArpsecIfIndex=atArpsecIfIndex, atArpsecVid=atArpsecVid)
142.910112
1,246
0.76885
1,623
12,719
6.02403
0.174368
0.009614
0.100951
0.00941
0.451979
0.337015
0.271965
0.257952
0.219904
0.172139
0
0.060336
0.100873
12,719
88
1,247
144.534091
0.794596
0.025159
0
0
0
0.0375
0.386037
0.007264
0
0
0
0
0
1
0
false
0
0.0875
0
0.0875
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
39c3eeca149c4cd77fbd4531c48fce23fb12f44a
743
py
Python
Udemy/Secao2/aula05.py
rafaelgama/Curso_Python
908231de9de4a17f5aa829f2671fd88de9261eda
[ "MIT" ]
1
2020-05-07T20:21:15.000Z
2020-05-07T20:21:15.000Z
Udemy/Secao2/aula05.py
rafaelgama/Curso_Python
908231de9de4a17f5aa829f2671fd88de9261eda
[ "MIT" ]
null
null
null
Udemy/Secao2/aula05.py
rafaelgama/Curso_Python
908231de9de4a17f5aa829f2671fd88de9261eda
[ "MIT" ]
null
null
null
# Arithmetic operators demo (print labels kept verbatim, typos included,
# because they are the script's runtime output).
print('Multiplicação *', 10 * 10)        # with numbers: plain multiplication
print('Mutiplicação * ', 10 * 'A')       # with a string: repetition
print('Adção + ', 10 + 10)               # numbers add; strings would concatenate
print('Subtração -', 10 - 5)
print('Divisão / ', 10 / 3)              # true division -> float
print('Divisão Inteira //', 10 // 3)     # floor division
print('Divisão Modulo %', 10 % 3)        # remainder
print('Potenciação **', 2 ** 4)          # exponentiation

# Operator precedence (highest to lowest):
"""
( n + n )   - Parentheses bind tightest; their contents evaluate first
**          - Then exponentiation
* / // %    - Then multiplication, division, floor division and modulo
+ -         - Finally addition and subtraction
"""
33.772727
115
0.689098
99
743
5.171717
0.565657
0.09375
0.046875
0.0625
0.089844
0
0
0
0
0
0
0.03928
0.177658
743
21
116
35.380952
0.798691
0.317631
0
0
0
0
0.420233
0
0
0
0
0
0
1
0
true
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
2
39f6d8f1a79c35e38c33272f8310f5832459166c
767
py
Python
SQL_obj_DOMINE/PFAM_SQL.py
diogo1790team/inphinity_DM
b20d75ee0485e1f406a25efcf5f2855631166c38
[ "MIT" ]
1
2019-03-11T12:59:37.000Z
2019-03-11T12:59:37.000Z
SQL_obj_DOMINE/PFAM_SQL.py
diogo1790team/inphinity_DM
b20d75ee0485e1f406a25efcf5f2855631166c38
[ "MIT" ]
21
2018-10-17T14:52:30.000Z
2019-06-03T12:43:58.000Z
SQL_obj_DOMINE/PFAM_SQL.py
diogo1790team/inphinity_DM
b20d75ee0485e1f406a25efcf5f2855631166c38
[ "MIT" ]
6
2019-02-28T07:40:14.000Z
2019-09-23T13:31:54.000Z
# -*- coding: utf-8 -*-
"""
Created on Tue Apr 10 13:28:58 2018

@author: Diogo
"""
from DAL import *


class _PFAM_sql(object):
    """Accessor for the PFAM table of the DOMINE database.

    A domain identifier typically looks like ``PFXXXXX``. Foreign keys are
    manipulated in the last positions of the parameters.
    """

    def __init__(self, db_name="domine_db_out"):
        # Database name handed to DAL on every query.
        self.db_name = db_name

    def get_all_domains(self):
        """Fetch every domain id (``PFXXXXX``) stored in DOMINE.

        :return: cursor with all domains
        :rtype: Cursor list
        """
        query = "select DomainAcc from PFAM"
        dal = DAL(self.db_name, query)
        return dal.executeSelect()
23.242424
83
0.634941
104
767
4.528846
0.596154
0.050955
0.063694
0.080679
0
0
0
0
0
0
0
0.023508
0.279009
767
33
84
23.242424
0.82821
0.469361
0
0
0
0
0.115727
0
0
0
0
0
0
1
0.222222
false
0
0.111111
0
0.555556
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
f2cfab4196250e262385afdfb11456e8e873a3fb
983
py
Python
module1-introduction-to-sql/query.py
illicitDev/DS-Unit-3-Sprint-2-SQL-and-Databases
630d7bee2af1959b32f7be4eb633897ec0f9c9df
[ "MIT" ]
null
null
null
module1-introduction-to-sql/query.py
illicitDev/DS-Unit-3-Sprint-2-SQL-and-Databases
630d7bee2af1959b32f7be4eb633897ec0f9c9df
[ "MIT" ]
null
null
null
module1-introduction-to-sql/query.py
illicitDev/DS-Unit-3-Sprint-2-SQL-and-Databases
630d7bee2af1959b32f7be4eb633897ec0f9c9df
[ "MIT" ]
null
null
null
"""SQL queries for the character-creator practice database.

Constant names and SQL semantics are preserved; only formatting and
documentation were added. (Names ``TOTAL_WEPONS``/``NON_WEPONS`` and the
alias ``theif`` are original misspellings kept because other modules may
import/consume them.)
"""

# How many characters exist in total.
TOTAL_CHARACTERS = """
SELECT COUNT(name)
FROM charactercreator_character;
"""

# One count per subclass, returned as one row with five columns.
# NOTE(review): alias 'theif' is a typo for 'thief'; kept to preserve the
# result column name.
TOTAL_SUBCLASS = """
SELECT
    (SELECT COUNT(*) FROM charactercreator_cleric) AS cleric,
    (SELECT COUNT(*) FROM charactercreator_fighter) AS fighter,
    (SELECT COUNT(*) FROM charactercreator_mage) AS mage,
    (SELECT COUNT(*) FROM charactercreator_necromancer) AS necromancer,
    (SELECT COUNT(*) FROM charactercreator_thief) AS theif;
"""

# Total items in the armory.
TOTAL_ITEMS = """
SELECT COUNT(*)
FROM armory_item;
"""

# Total weapons in the armory.
TOTAL_WEPONS = """
SELECT COUNT(*)
FROM armory_weapon;
"""

# NOTE(review): despite the name, the INNER JOIN returns items that ARE
# weapons; listing true non-weapons would need a LEFT JOIN with
# "WHERE item_ptr_id IS NULL". Kept as-is to preserve the original result
# set — confirm intent with the assignment.
NON_WEPONS = """
SELECT item_id, item_ptr_id
FROM armory_item
INNER JOIN armory_weapon
    ON armory_item.item_id = armory_weapon.item_ptr_id;
"""

# Number of inventory items held by each character.
CHARACTER_ITEMS = """
SELECT character_id, COUNT(character_id)
FROM charactercreator_character_inventory
GROUP BY character_id;
"""
21.369565
55
0.628688
101
983
5.831683
0.29703
0.149406
0.178268
0.263158
0
0
0
0
0
0
0
0
0.277721
983
45
56
21.844444
0.829577
0
0
0.317073
0
0
0.855544
0.21058
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
f2d76527eec87d4415e9e573aa9ebc62b07113ae
672
py
Python
flags/tests/test_management_commands_enable_flag.py
mdunc/django-flags
7fdb7a67da25df197f53df4bfa06c8e5175944a3
[ "CC0-1.0" ]
142
2018-07-27T15:38:13.000Z
2022-03-19T19:09:44.000Z
flags/tests/test_management_commands_enable_flag.py
mdunc/django-flags
7fdb7a67da25df197f53df4bfa06c8e5175944a3
[ "CC0-1.0" ]
64
2018-06-25T14:21:35.000Z
2022-03-14T17:42:18.000Z
flags/tests/test_management_commands_enable_flag.py
mdunc/django-flags
7fdb7a67da25df197f53df4bfa06c8e5175944a3
[ "CC0-1.0" ]
24
2018-10-09T20:05:36.000Z
2022-03-29T16:34:30.000Z
from io import StringIO

from django.core.management import call_command
from django.core.management.base import CommandError
from django.test import TestCase

from flags.state import flag_enabled


class EnableFlagTestCase(TestCase):
    """Tests for the ``enable_flag`` management command."""

    def test_enable_flag(self):
        # Capture command output so the success message can be asserted.
        captured = StringIO()
        self.assertFalse(flag_enabled("DB_FLAG"))
        call_command("enable_flag", "DB_FLAG", stdout=captured)
        self.assertTrue(flag_enabled("DB_FLAG"))
        self.assertIn("Successfully enabled", captured.getvalue())

    def test_enable_flag_non_existent_flag(self):
        # Unknown flag names must be rejected with a CommandError.
        with self.assertRaises(CommandError):
            call_command("enable_flag", "FLAG_DOES_NOT_EXIST")
32
62
0.738095
85
672
5.588235
0.435294
0.084211
0.058947
0.101053
0
0
0
0
0
0
0
0
0.169643
672
20
63
33.6
0.851254
0
0
0
0
0
0.122024
0
0
0
0
0
0.266667
1
0.133333
false
0
0.333333
0
0.533333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
f2dd71f415f020c8f7ce7dac3e427ace6b4bf76f
6,763
py
Python
test/loader/test_link_neighbor_loader.py
NucciTheBoss/pytorch_geometric
e220a2c08fa1b2f1672d616c22eac2a67b5c8967
[ "MIT" ]
2,350
2021-09-12T08:32:50.000Z
2022-03-31T18:09:36.000Z
test/loader/test_link_neighbor_loader.py
NucciTheBoss/pytorch_geometric
e220a2c08fa1b2f1672d616c22eac2a67b5c8967
[ "MIT" ]
588
2021-09-12T08:49:08.000Z
2022-03-31T21:02:13.000Z
test/loader/test_link_neighbor_loader.py
NucciTheBoss/pytorch_geometric
e220a2c08fa1b2f1672d616c22eac2a67b5c8967
[ "MIT" ]
505
2021-09-13T13:13:32.000Z
2022-03-31T15:54:00.000Z
import pytest import torch from torch_geometric.data import Data, HeteroData from torch_geometric.loader import LinkNeighborLoader def get_edge_index(num_src_nodes, num_dst_nodes, num_edges): row = torch.randint(num_src_nodes, (num_edges, ), dtype=torch.long) col = torch.randint(num_dst_nodes, (num_edges, ), dtype=torch.long) return torch.stack([row, col], dim=0) def unique_edge_pairs(edge_index): return set(map(tuple, edge_index.t().tolist())) @pytest.mark.parametrize('directed', [True, False]) @pytest.mark.parametrize('neg_sampling_ratio', [0.0, 1.0]) def test_homogeneous_link_neighbor_loader(directed, neg_sampling_ratio): torch.manual_seed(12345) pos_edge_index = get_edge_index(100, 50, 500) neg_edge_index = get_edge_index(100, 50, 500) neg_edge_index[1, :] += 50 edge_label_index = torch.cat([pos_edge_index, neg_edge_index], dim=-1) edge_label = torch.cat([torch.ones(500), torch.zeros(500)], dim=0) data = Data() data.edge_index = pos_edge_index data.x = torch.arange(100) data.edge_attr = torch.arange(500) loader = LinkNeighborLoader( data, num_neighbors=[-1] * 2, batch_size=20, edge_label_index=edge_label_index, edge_label=edge_label if neg_sampling_ratio == 0.0 else None, directed=directed, neg_sampling_ratio=neg_sampling_ratio, shuffle=True, ) assert str(loader) == 'LinkNeighborLoader()' assert len(loader) == 1000 / 20 for batch in loader: assert isinstance(batch, Data) assert len(batch) == 5 assert batch.x.size(0) <= 100 assert batch.x.min() >= 0 and batch.x.max() < 100 assert batch.edge_index.min() >= 0 assert batch.edge_index.max() < batch.num_nodes assert batch.edge_attr.min() >= 0 assert batch.edge_attr.max() < 500 if neg_sampling_ratio == 0.0: assert batch.edge_label_index.size(1) == 20 # Assert positive samples are present in the original graph: edge_index = unique_edge_pairs(batch.edge_index) edge_label_index = batch.edge_label_index[:, batch.edge_label == 1] edge_label_index = unique_edge_pairs(edge_label_index) assert len(edge_index | 
edge_label_index) == len(edge_index) # Assert negative samples are not present in the original graph: edge_index = unique_edge_pairs(batch.edge_index) edge_label_index = batch.edge_label_index[:, batch.edge_label == 0] edge_label_index = unique_edge_pairs(edge_label_index) assert len(edge_index & edge_label_index) == 0 else: assert batch.edge_label_index.size(1) == 40 assert torch.all(batch.edge_label[:20] == 1) assert torch.all(batch.edge_label[20:] == 0) @pytest.mark.parametrize('directed', [True, False]) @pytest.mark.parametrize('neg_sampling_ratio', [0.0, 1.0]) def test_heterogeneous_link_neighbor_loader(directed, neg_sampling_ratio): torch.manual_seed(12345) data = HeteroData() data['paper'].x = torch.arange(100) data['author'].x = torch.arange(100, 300) data['paper', 'paper'].edge_index = get_edge_index(100, 100, 500) data['paper', 'paper'].edge_attr = torch.arange(500) data['paper', 'author'].edge_index = get_edge_index(100, 200, 1000) data['paper', 'author'].edge_attr = torch.arange(500, 1500) data['author', 'paper'].edge_index = get_edge_index(200, 100, 1000) data['author', 'paper'].edge_attr = torch.arange(1500, 2500) loader = LinkNeighborLoader( data, num_neighbors=[-1] * 2, edge_label_index=('paper', 'author'), batch_size=20, directed=directed, neg_sampling_ratio=neg_sampling_ratio, shuffle=True, ) assert str(loader) == 'LinkNeighborLoader()' assert len(loader) == 1000 / 20 for batch in loader: assert isinstance(batch, HeteroData) if neg_sampling_ratio == 0.0: assert len(batch) == 4 # Assert positive samples are present in the original graph: edge_index = unique_edge_pairs(batch['paper', 'author'].edge_index) edge_label_index = batch['paper', 'author'].edge_label_index edge_label_index = unique_edge_pairs(edge_label_index) assert len(edge_index | edge_label_index) == len(edge_index) else: assert len(batch) == 5 assert batch['paper', 'author'].edge_label_index.size(1) == 40 assert torch.all(batch['paper', 'author'].edge_label[:20] == 1) assert 
torch.all(batch['paper', 'author'].edge_label[20:] == 0) @pytest.mark.parametrize('directed', [True, False]) def test_heterogeneous_link_neighbor_loader_loop(directed): torch.manual_seed(12345) data = HeteroData() data['paper'].x = torch.arange(100) data['author'].x = torch.arange(100, 300) data['paper', 'paper'].edge_index = get_edge_index(100, 100, 500) data['paper', 'author'].edge_index = get_edge_index(100, 200, 1000) data['author', 'paper'].edge_index = get_edge_index(200, 100, 1000) loader = LinkNeighborLoader(data, num_neighbors=[-1] * 2, edge_label_index=('paper', 'paper'), batch_size=20, directed=directed) for batch in loader: assert batch['paper'].x.size(0) <= 100 assert batch['paper'].x.min() >= 0 and batch['paper'].x.max() < 100 # Assert positive samples are present in the original graph: edge_index = unique_edge_pairs(batch['paper', 'paper'].edge_index) edge_label_index = batch['paper', 'paper'].edge_label_index edge_label_index = unique_edge_pairs(edge_label_index) assert len(edge_index | edge_label_index) == len(edge_index) def test_link_neighbor_loader_edge_label(): torch.manual_seed(12345) edge_index = get_edge_index(100, 100, 500) data = Data(edge_index=edge_index, x=torch.arange(100)) loader = LinkNeighborLoader( data, num_neighbors=[-1] * 2, batch_size=10, neg_sampling_ratio=1.0, ) for batch in loader: assert batch.edge_label.dtype == torch.float assert torch.all(batch.edge_label[:10] == 1.0) assert torch.all(batch.edge_label[10:] == 0.0) loader = LinkNeighborLoader( data, num_neighbors=[-1] * 2, batch_size=10, edge_label=torch.ones(500, dtype=torch.long), neg_sampling_ratio=1.0, ) for batch in loader: assert batch.edge_label.dtype == torch.long assert torch.all(batch.edge_label[:10] == 2) assert torch.all(batch.edge_label[10:] == 0)
35.594737
79
0.649564
916
6,763
4.545852
0.112445
0.097262
0.09414
0.050192
0.809558
0.747839
0.678434
0.627281
0.604707
0.571806
0
0.052531
0.225935
6,763
189
80
35.783069
0.742884
0.035339
0
0.496403
0
0
0.050468
0
0
0
0
0
0.244604
1
0.043165
false
0
0.028777
0.007194
0.086331
0
0
0
0
null
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
f2dd7616a332813eff056e52f78abb0ce59a02ae
1,264
py
Python
setup.py
explo-fees/cavelink
685f98db2f542f2b62b8b7da88a2c906f08a8ce9
[ "MIT" ]
null
null
null
setup.py
explo-fees/cavelink
685f98db2f542f2b62b8b7da88a2c906f08a8ce9
[ "MIT" ]
null
null
null
setup.py
explo-fees/cavelink
685f98db2f542f2b62b8b7da88a2c906f08a8ce9
[ "MIT" ]
null
null
null
# coding: utf-8
"""
A simple module to fetch Cavelink values by parsing the HTML page of sensors.
"""
from setuptools import find_packages, setup

# The PyPI long description is the README, read once at build time.
with open('README.rst', 'r') as readme:
    long_description = readme.read()

setup(
    name='cavelink',
    version='1.1.2',
    author='Sébastien Pittet',
    author_email='sebastien@pittet.org',
    description='Fetch Cavelink data by parsing the webpage of sensors.',
    long_description=long_description,
    url='https://github.com/explo-fees/cavelink',
    keywords='speleo cave sensor',
    packages=find_packages(),
    license='MIT',
    platforms='any',
    install_requires=['python-dateutil', 'requests'],
    classifiers=[
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 2',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Intended Audience :: Other Audience'
    ]
)
32.410256
77
0.633703
141
1,264
5.631206
0.609929
0.167506
0.220403
0.163728
0
0
0
0
0
0
0
0.017418
0.227848
1,264
38
78
33.263158
0.796107
0.072785
0
0
0
0
0.545533
0
0
0
0
0
0
1
0
false
0
0.032258
0
0.032258
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
f2e24ce823a4e53c37a4b4ed88d507a906c58ca0
229
py
Python
socket/base/client.py
PlusLius/awesome-python
beba4b8de8d863c10e4c0c7040b47e63ca24cf3f
[ "MIT" ]
null
null
null
socket/base/client.py
PlusLius/awesome-python
beba4b8de8d863c10e4c0c7040b47e63ca24cf3f
[ "MIT" ]
null
null
null
socket/base/client.py
PlusLius/awesome-python
beba4b8de8d863c10e4c0c7040b47e63ca24cf3f
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-
"""Minimal TCP client demo: connect to the local demo server and print its reply.

BUG FIX: the original used the Python 2 ``print`` statement, which is a
syntax error on Python 3; ported to Python 3 and given guaranteed socket
cleanup via a context manager.
"""
import socket

# Host and port must match what the paired server script binds to.
HOST = socket.gethostname()
PORT = 12345


def main():
    """Connect to the server, read up to 1024 bytes, print them, disconnect."""
    # The context manager closes the socket even if connect/recv raises.
    with socket.socket() as client:
        client.connect((HOST, PORT))
        # recv returns bytes on Python 3; decode for readable output.
        # NOTE(review): assumes the server sends UTF-8 text — confirm.
        print(client.recv(1024).decode('utf-8', errors='replace'))


if __name__ == '__main__':
    main()
11.45
27
0.694323
29
229
5.482759
0.724138
0
0
0
0
0
0
0
0
0
0
0.071795
0.148472
229
19
28
12.052632
0.74359
0.358079
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0.142857
null
null
0.142857
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
f2f9a868f7dc31005789f384690840fd580d22ab
4,829
py
Python
blackjack.py
ColeNorvell/blackjack_py
b61b418b60ee40e376a031e4d6953419bd7fd0a8
[ "Unlicense" ]
null
null
null
blackjack.py
ColeNorvell/blackjack_py
b61b418b60ee40e376a031e4d6953419bd7fd0a8
[ "Unlicense" ]
null
null
null
blackjack.py
ColeNorvell/blackjack_py
b61b418b60ee40e376a031e4d6953419bd7fd0a8
[ "Unlicense" ]
null
null
null
import random
import sys


class Card:
    """A single playing card with its blackjack value."""

    def __init__(self, suit, value, face, ace):
        self.suit = suit    # "Spades", "Hearts", "Clubs" or "Diamonds"
        self.value = value  # blackjack value: 2-10, faces count 10, aces 11
        self.face = face    # "King"/"Queen"/"Jack" for face cards, else False
        self.ace = ace      # True only for aces


class Deck:
    """A standard 52-card deck that deals from the top."""

    SUITS = ("Spades", "Hearts", "Clubs", "Diamonds")

    def __init__(self):
        # Create a list to hold all of the cards in our deck, built with
        # loops instead of 52 hand-written append calls.
        self.card = []
        for suit in self.SUITS:
            # Number cards 2 through 10.
            for value in range(2, 11):
                self.card.append(Card(suit, value, False, False))
            # Face cards all count 10 in blackjack.
            for face in ("King", "Queen", "Jack"):
                self.card.append(Card(suit, 10, face, False))
            # Ace counts 11 (until the hand logic treats it as 1).
            self.card.append(Card(suit, 11, False, True))
        # Bug fix: top_card was previously only set in shuffle(), so
        # calling deal() on an unshuffled deck raised AttributeError.
        self.top_card = 0

    def shuffle(self):
        """Shuffle the deck in place and reset the dealing position."""
        self.top_card = 0
        # random.shuffle gives a uniform permutation; the original's
        # 1000 random pair swaps approximated the same thing by hand.
        random.shuffle(self.card)

    def deal(self):
        """Return the next undealt card and advance the deal position.

        Bug fix: the original returned before incrementing top_card, so
        the increment was dead code and every call dealt self.card[-1].
        """
        dealt = self.card[self.top_card]
        self.top_card = self.top_card + 1
        return dealt


class PokerPlayer:
    """A participant in the game, holding a hand of cards."""

    def __init__(self, dealer):
        self.dealer = dealer  # dealer is a boolean value
        self.hand = []
        self.number_of_cards_held = 0


player = PokerPlayer(False)
dealer = PokerPlayer(True)

deck = Deck()
deck.shuffle()

# Deal two cards each, alternating player / dealer.
player.hand.append(deck.deal())
dealer.hand.append(deck.deal())
player.hand.append(deck.deal())
dealer.hand.append(deck.deal())

print("Dealers First Card: " + dealer.hand[0].suit)
print("Dealers Second Card: " + dealer.hand[1].suit)
print("Players First Card: " + player.hand[0].suit)
print("Players Second Card: " + player.hand[1].suit)
43.9
85
0.617312
648
4,829
4.570988
0.118827
0.159352
0.24578
0.316003
0.723835
0.691425
0.600608
0.071911
0.071911
0.071911
0
0.023987
0.223028
4,829
109
86
44.302752
0.765458
0.075378
0
0.043956
0
0
0.103053
0
0
0
0
0
0
1
0.054945
false
0
0.021978
0
0.120879
0.043956
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
844654ee48a5d774c468d9507374c68f279c27f5
319
py
Python
src/sima/riflex/matrixstorage.py
SINTEF/simapy
650b8c2f15503dad98e2bfc0d0788509593822c7
[ "MIT" ]
null
null
null
src/sima/riflex/matrixstorage.py
SINTEF/simapy
650b8c2f15503dad98e2bfc0d0788509593822c7
[ "MIT" ]
null
null
null
src/sima/riflex/matrixstorage.py
SINTEF/simapy
650b8c2f15503dad98e2bfc0d0788509593822c7
[ "MIT" ]
null
null
null
# Generated with MatrixStorage
#
from enum import Enum
from enum import auto


class MatrixStorage(Enum):
    """Available storage schemes for a system matrix."""

    SKYLINE = auto()
    SPARSE = auto()

    def label(self):
        """Return the human-readable label for this member (None if unmapped)."""
        member_labels = {
            MatrixStorage.SKYLINE: "Skyline",
            MatrixStorage.SPARSE: "Sparse",
        }
        return member_labels.get(self)
21.266667
41
0.605016
33
319
5.848485
0.454545
0.082902
0.145078
0
0
0
0
0
0
0
0
0
0.294671
319
15
42
21.266667
0.857778
0.087774
0
0
1
0
0.046099
0
0
0
0
0
0
1
0.1
false
0
0.2
0
0.8
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
84467d8fca79064dff4bdeadd98cfeca3e461aec
362
py
Python
wildlifecompliance/migrations/0176_merge_20190429_0959.py
preranaandure/wildlifecompliance
bc19575f7bccf7e19adadbbaf5d3eda1d1aee4b5
[ "Apache-2.0" ]
1
2020-12-07T17:12:40.000Z
2020-12-07T17:12:40.000Z
wildlifecompliance/migrations/0176_merge_20190429_0959.py
preranaandure/wildlifecompliance
bc19575f7bccf7e19adadbbaf5d3eda1d1aee4b5
[ "Apache-2.0" ]
14
2020-01-08T08:08:26.000Z
2021-03-19T22:59:46.000Z
wildlifecompliance/migrations/0176_merge_20190429_0959.py
preranaandure/wildlifecompliance
bc19575f7bccf7e19adadbbaf5d3eda1d1aee4b5
[ "Apache-2.0" ]
15
2020-01-08T08:02:28.000Z
2021-11-03T06:48:32.000Z
# -*- coding: utf-8 -*- # Generated by Django 1.10.8 on 2019-04-29 01:59 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('wildlifecompliance', '0175_merge_20190427_1257'), ('wildlifecompliance', '0174_merge_20190426_1804'), ] operations = [ ]
21.294118
59
0.685083
41
362
5.780488
0.804878
0
0
0
0
0
0
0
0
0
0
0.168966
0.198895
362
16
60
22.625
0.648276
0.187845
0
0
1
0
0.28866
0.164948
0
0
0
0
0
1
0
false
0
0.222222
0
0.555556
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
84484c618e087b473f0cda04948e5e44db6943d4
213
py
Python
genesis/utano/urls.py
EmersonAlvaro/genesis
7e4b32320c45ee48983a9869fba3bc75686a4e76
[ "MIT" ]
null
null
null
genesis/utano/urls.py
EmersonAlvaro/genesis
7e4b32320c45ee48983a9869fba3bc75686a4e76
[ "MIT" ]
null
null
null
genesis/utano/urls.py
EmersonAlvaro/genesis
7e4b32320c45ee48983a9869fba3bc75686a4e76
[ "MIT" ]
null
null
null
from django.urls import path

from . import views

# URL routes for the utano app.
urlpatterns = [
    # App landing page.
    path('', views.Home, name='Utano_Home'),
    # Script output endpoint.  NOTE(review): exact purpose not visible
    # here -- confirm against views.output.
    path('output', views.output,name="script"),
    # "Falar" page (Portuguese for "to speak").
    path('falar/', views.Falar, name='Utano_Falar'),
]
26.625
52
0.666667
28
213
5
0.464286
0.128571
0
0
0
0
0
0
0
0
0
0
0.150235
213
8
53
26.625
0.773481
0
0
0
0
0
0.182243
0
0
0
0
0
0
1
0
false
0
0.285714
0
0.285714
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
844d0ac522b943f3ec3c288ceaabe640a2f5f284
3,280
py
Python
google-api-client-generator/src/googleapis/codegen/cpp_import_manager_test.py
cclauss/discovery-artifact-manager
9eb6bcef290ef87006758349c725d440fbfc85d6
[ "Apache-2.0" ]
178
2015-03-20T13:37:14.000Z
2022-01-09T15:57:47.000Z
google-api-client-generator/src/googleapis/codegen/cpp_import_manager_test.py
cclauss/discovery-artifact-manager
9eb6bcef290ef87006758349c725d440fbfc85d6
[ "Apache-2.0" ]
183
2017-03-23T17:17:24.000Z
2022-02-09T00:07:17.000Z
google-api-client-generator/src/googleapis/codegen/cpp_import_manager_test.py
cclauss/discovery-artifact-manager
9eb6bcef290ef87006758349c725d440fbfc85d6
[ "Apache-2.0" ]
51
2015-03-23T20:32:40.000Z
2021-08-04T15:42:57.000Z
#!/usr/bin/python2.7
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tests for cpp_import_manager."""

__author__ = 'ewiseblatt@google.com (Eric Wiseblatt)'

from google.apputils import basetest
from googleapis.codegen.cpp_import_manager import CppImportManager


class MockSchema(object):
  """Mock schema used in place of real schema objects."""

  def __init__(self):
    self._template_values_dict = {}

  def SetTemplateValue(self, template_name, template_value):
    self._template_values_dict[template_name] = template_value

  def GetTemplateValue(self, template_name):
    return self._template_values_dict.get(template_name)


class CppImportManagerTest(basetest.TestCase):
  """Exercises CppImportManager's import classification and commit."""

  def setUp(self):
    super(CppImportManagerTest, self).setUp()
    self.mock_schema = MockSchema()
    self.import_manager = CppImportManager(self.mock_schema)

  def testAddImportAndCommit(self):
    # Add a com.google import.
    com_google_import = '"base/integral_types.h"'
    self.assertTrue(self.import_manager.AddImport(com_google_import))
    self.assertFalse(self.import_manager.platform_imports)
    self.assertFalse(self.import_manager.other_imports)
    self.assertTrue(self.import_manager.google_imports)

    # There are no platform imports for C++
    platform_import = '<string>'
    self.assertTrue(self.import_manager.AddImport(platform_import))
    self.assertTrue(self.import_manager.platform_imports)

    # Add a random thing
    other_import = '"Module.h"'
    self.import_manager.AddImport(other_import)

    # Assert the contents of the google, other and platform imports.
    # Fix: assertEquals is a deprecated alias of assertEqual (removed in
    # Python 3.12); use the canonical name throughout.
    expected_google_import_set = set()
    expected_google_import_set.add(com_google_import)
    sorted_expected_google_import_set = sorted(expected_google_import_set)
    self.assertEqual(sorted_expected_google_import_set,
                     list(self.import_manager.google_imports))
    self.assertEqual([other_import],
                     list(self.import_manager.other_imports))
    self.assertEqual([platform_import],
                     list(self.import_manager.platform_imports))

    # Assert the contents of class_name_to_qualified_name map.
    self.assertEqual(
        com_google_import,
        self.import_manager._class_name_to_qualified_name[com_google_import])

    # Assert that commit import works.
    # The import_manager combines the platform and google imports together
    # but each segment is first sorted.
    expected_import_list = [
        sorted([platform_import]) + sorted_expected_google_import_set,
        [other_import]]
    self.assertEqual(
        expected_import_list,
        self.mock_schema.GetTemplateValue('importManager').ImportLists())


if __name__ == '__main__':
  basetest.main()
36.853933
77
0.74878
415
3,280
5.650602
0.368675
0.083156
0.086994
0.058849
0.235394
0.092111
0
0
0
0
0
0.003667
0.168598
3,280
88
78
37.272727
0.856252
0.307317
0
0.043478
0
0
0.044643
0.019643
0
0
0
0
0.23913
1
0.108696
false
0
0.695652
0.021739
0.869565
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
8463b81df100d1ff5a3543a9f34c515d6438f9ac
4,268
py
Python
teleband/assignments/migrations/0001_initial.py
JMU-CIME/CPR-Music-Backend
b72b70ed8826595c96c028595181293edcf1e368
[ "MIT" ]
2
2022-01-08T20:21:43.000Z
2022-03-18T03:31:30.000Z
teleband/assignments/migrations/0001_initial.py
JMU-CIME/CPR-Music-Backend
b72b70ed8826595c96c028595181293edcf1e368
[ "MIT" ]
16
2022-01-08T02:12:54.000Z
2022-03-02T03:02:59.000Z
teleband/assignments/migrations/0001_initial.py
JMU-CIME/CPR-Music-Backend
b72b70ed8826595c96c028595181293edcf1e368
[ "MIT" ]
2
2022-01-08T00:21:37.000Z
2022-01-18T05:33:15.000Z
# Generated by Django 3.2.11 on 2022-01-07 02:15

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    # Initial schema for the assignments app: creates Activity,
    # ActivityCategory, Assignment and ActivityType, then wires up the
    # Activity foreign keys after their target tables exist.

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("musics", "0001_initial"),
        ("instruments", "0001_initial"),
    ]

    operations = [
        # Activity: free-form body text; its activity_type and part FKs
        # are added at the end of this migration (see AddField below).
        migrations.CreateModel(
            name="Activity",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("body", models.TextField()),
            ],
            options={
                "verbose_name": "Activity",
                "verbose_name_plural": "Activities",
            },
        ),
        migrations.CreateModel(
            name="ActivityCategory",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", models.CharField(max_length=255)),
            ],
            options={
                "verbose_name": "Activity Category",
                "verbose_name_plural": "Activity Categories",
            },
        ),
        # Assignment: links a user, an instrument and an activity, with
        # an optional deadline.  PROTECT blocks deletion of referenced
        # rows while assignments still point at them.
        migrations.CreateModel(
            name="Assignment",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("deadline", models.DateField(blank=True, null=True)),
                (
                    "activity",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.PROTECT,
                        to="assignments.activity",
                    ),
                ),
                (
                    "instrument",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.PROTECT,
                        to="instruments.instrument",
                    ),
                ),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.PROTECT,
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
        ),
        # ActivityType: unique name within a category.
        migrations.CreateModel(
            name="ActivityType",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", models.CharField(max_length=255, unique=True)),
                (
                    "category",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.PROTECT,
                        to="assignments.activitycategory",
                    ),
                ),
            ],
            options={
                "verbose_name": "Activity Type",
                "verbose_name_plural": "Activity Types",
            },
        ),
        # Deferred FKs on Activity, added once ActivityType exists.
        migrations.AddField(
            model_name="activity",
            name="activity_type",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.PROTECT,
                to="assignments.activitytype",
            ),
        ),
        migrations.AddField(
            model_name="activity",
            name="part",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.PROTECT,
                to="musics.parttransposition",
            ),
        ),
    ]
31.382353
72
0.39925
272
4,268
6.121324
0.286765
0.066066
0.058859
0.092492
0.505105
0.505105
0.458258
0.458258
0.458258
0.458258
0
0.014252
0.506795
4,268
135
73
31.614815
0.776722
0.010778
0
0.617188
1
0
0.11327
0.023223
0
0
0
0
0
1
0
false
0
0.023438
0
0.054688
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
8466564c8192cf1285921f2336cb711d6519e870
66,023
py
Python
basin3d/plugins/usgs_huc_codes.py
heycatwonton/basin3d
980a52fa8d6c073cd6704950cfd4168c7f4b0f61
[ "BSD-3-Clause-LBNL" ]
4
2021-04-21T17:34:42.000Z
2021-12-16T09:02:59.000Z
basin3d/plugins/usgs_huc_codes.py
heycatwonton/basin3d
980a52fa8d6c073cd6704950cfd4168c7f4b0f61
[ "BSD-3-Clause-LBNL" ]
92
2020-12-10T03:48:11.000Z
2022-03-24T15:12:22.000Z
basin3d/plugins/usgs_huc_codes.py
heycatwonton/basin3d
980a52fa8d6c073cd6704950cfd4168c7f4b0f61
[ "BSD-3-Clause-LBNL" ]
5
2021-03-23T16:30:53.000Z
2021-12-19T14:28:03.000Z
CONTENT = """huc basin <!--10n 60s--> 01 New England 0101 St. John 010100 St. John 01010001 Upper St. John 01010002 Allagash 01010003 Fish 01010004 Aroostook 01010005 Meduxnekeag 0102 Penobscot 010200 Penobscot 01020001 West Branch Penobscot 01020002 East Branch Penobscot 01020003 Mattawamkeag 01020004 Piscataquis 01020005 Lower Penobscot 0103 Kennebec 010300 Kennebec 01030001 Upper Kennebec 01030002 Dead 01030003 Lower Kennebec 0104 Androscoggin 010400 Androscoggin 01040001 Upper Androscoggin 01040002 Lower Androscoggin 0105 Maine Coastal 010500 Maine Coastal 01050001 St. Croix 01050002 Maine Coastal 01050003 St. George-Sheepscot 0106 Saco 010600 Saco 01060001 Presumpscot 01060002 Saco 01060003 Piscataqua-Salmon Falls 0107 Merrimack 010700 Merrimack 01070001 Pemigewasset 01070002 Merrimack 01070003 Contoocook 01070004 Nashua 01070005 Concord 0108 Connecticut 010801 Upper Connecticut 01080101 Upper Connecticut 01080102 Passumpsic 01080103 Waits 01080104 Upper Connecticut-Mascoma 01080105 White 01080106 Black-Ottauquechee 01080107 West 010802 Lower Connecticut 01080201 Middle Connecticut 01080202 Miller 01080203 Deerfield 01080204 Chicopee 01080205 Lower Connecticut 01080206 Westfield 01080207 Farmington 0109 Massachusetts-Rhode Island Coastal 010900 Massachusetts-Rhode Island Coastal 01090001 Charles 01090002 Cape Cod 01090003 Blackstone 01090004 Narragansett 01090005 Pawcatuck-Wood 0110 Connecticut Coastal 011000 Connecticut Coastal 01100001 Quinebaug 01100002 Shetucket 01100003 Thames 01100004 Quinnipiac 01100005 Housatonic 01100006 Saugatuck 01100007 Long Island Sound 0111 St. Francois 011100 St. Francois 01110000 St. 
Francois 02 Mid Atlantic 0201 Richelieu 020100 Richelieu 02010001 Lake George 02010002 Otter 02010003 Winooski 02010004 Ausable 02010005 Lamoille 02010006 Great Chazy-Saranac 02010007 Missisquoi 0202 Upper Hudson 020200 Upper Hudson 02020001 Upper Hudson 02020002 Sacandaga 02020003 Hudson-Hoosic 02020004 Mohawk 02020005 Schoharie 02020006 Middle Hudson 02020007 Rondout 02020008 Hudson-Wappinger 0203 Lower Hudson-Long Island 020301 Lower Hudson 02030101 Lower Hudson 02030102 Bronx 02030103 Hackensack-Passaic 02030104 Sandy Hook-Staten Island 02030105 Raritan 020302 Long Island 02030201 Northern Long Island 02030202 Southern Long Island 0204 Delaware 020401 Upper Delaware 02040101 Upper Delaware 02040102 East Branch Delaware 02040103 Lackawaxen 02040104 Middle Delaware-Mongaup-Brodhead 02040105 Middle Delaware-Musconetcong 02040106 Lehigh 020402 Lower Delaware 02040201 Crosswicks-Neshaminy 02040202 Lower Delaware 02040203 Schuylkill 02040204 Delaware Bay 02040205 Brandywine-Christina 02040206 Cohansey-Maurice 02040207 Broadkill-Smyrna 020403 New Jersey Coastal 02040301 Mullica-Toms 02040302 Great Egg Harbor 0205 Susquehanna 020501 Upper Susquehanna 02050101 Upper Susquehanna 02050102 Chenango 02050103 Owego-Wappasening 02050104 Tioga 02050105 Chemung 02050106 Upper Susquehanna-Tunkhannock 02050107 Upper Susquehanna-Lackawanna 020502 West Branch Susquehanna 02050201 Upper West Branch Susquehanna 02050202 Sinnemahoning 02050203 Middle West Branch Susquehanna 02050204 Bald Eagle 02050205 Pine 02050206 Lower West Branch Susquehanna 020503 Lower Susquehanna 02050301 Lower Susquehanna-Penns 02050302 Upper Juniata 02050303 Raystown 02050304 Lower Juniata 02050305 Lower Susquehanna-Swatara 02050306 Lower Susquehanna 0206 Upper Chesapeake 020600 Upper Chesapeake 02060001 Upper Chesapeake Bay 02060002 Chester-Sassafras 02060003 Gunpowder-Patapsco 02060004 Severn 02060005 Choptank 02060006 Patuxent 02060007 Blackwater-Wicomico 02060008 Nanticoke 02060009 Pocomoke 02060010 
Chincoteague 0207 Potomac 020700 Potomac 02070001 South Branch Potomac 02070002 North Branch Potomac 02070003 Cacapon-Town 02070004 Conococheague-Opequon 02070005 South Fork Shenandoah 02070006 North Fork Shenandoah 02070007 Shenandoah 02070008 Middle Potomac-Catoctin 02070009 Monocacy 02070010 Middle Potomac-Anacostia-Occoquan 02070011 Lower Potomac 0208 Lower Chesapeake 020801 Lower Chesapeake 02080101 Lower Chesapeake Bay 02080102 Great Wicomico-Piankatank 02080103 Rapidan-Upper Rappahannock 02080104 Lower Rappahannock 02080105 Mattaponi 02080106 Pamunkey 02080107 York 02080108 Lynnhaven-Poquoson 02080109 Western Lower Delmarva 02080110 Eastern Lower Delmarva 020802 James 02080201 Upper James 02080202 Maury 02080203 Middle James-Buffalo 02080204 Rivanna 02080205 Middle James-Willis 02080206 Lower James 02080207 Appomattox 02080208 Hampton Roads 03 South Atlantic-Gulf 0301 Chowan-Roanoke 030101 Roanoke 03010101 Upper Roanoke 03010102 Middle Roanoke 03010103 Upper Dan 03010104 Lower Dan 03010105 Banister 03010106 Roanoke Rapids 03010107 Lower Roanoke 030102 Albemarle-Chowan 03010201 Nottoway 03010202 Blackwater 03010203 Chowan 03010204 Meherrin 03010205 Albemarle 0302 Neuse-Pamlico 030201 Pamlico 03020101 Upper Tar 03020102 Fishing 03020103 Lower Tar 03020104 Pamlico 03020105 Pamlico Sound 03020106 Bogue-Core Sounds 030202 Neuse 03020201 Upper Neuse 03020202 Middle Neuse 03020203 Contentnea 03020204 Lower Neuse 0303 Cape Fear 030300 Cape Fear 03030001 New 03030002 Haw 03030003 Deep 03030004 Upper Cape Fear 03030005 Lower Cape Fear 03030006 Black 03030007 Northeast Cape Fear 0304 Pee Dee 030401 Upper Pee Dee 03040101 Upper Yadkin 03040102 South Yadkin 03040103 Lower Yadkin 03040104 Upper Pee Dee 03040105 Rocky 030402 Lower Pee Dee 03040201 Lower Pee Dee 03040202 Lynches 03040203 Lumber 03040204 Little Pee Dee 03040205 Black 03040206 Waccamaw 03040207 Carolina Coastal-Sampit 0305 Edisto-Santee 030501 Santee 03050101 Upper Catawba 03050102 South Fork Catawba 03050103 
Lower Catawba 03050104 Wateree 03050105 Upper Broad 03050106 Lower Broad 03050107 Tyger 03050108 Enoree 03050109 Saluda 03050110 Congaree 03050111 Lake Marion 03050112 Santee 030502 Edisto-South Carolina Coastal 03050201 Cooper 03050202 South Carolina Coastal 03050203 North Fork Edisto 03050204 South Fork Edisto 03050205 Edisto 03050206 Four Hole Swamp 03050207 Salkehatchie 03050208 Broad-St. Helena 0306 Ogeechee-Savannah 030601 Savannah 03060101 Seneca 03060102 Tugaloo 03060103 Upper Savannah 03060104 Broad 03060105 Little 03060106 Middle Savannah 03060107 Stevens 03060108 Brier 03060109 Lower Savannah 030602 Ogeechee 03060201 Upper Ogeechee 03060202 Lower Ogeechee 03060203 Canoochee 03060204 Ogeechee Coastal 0307 Altamaha - St. Marys 030701 Altamaha 03070101 Upper Oconee 03070102 Lower Oconee 03070103 Upper Ocmulgee 03070104 Lower Ocmulgee 03070105 Little Ocmulgee 03070106 Altamaha 03070107 Ohoopee 030702 St. Marys - Satilla 03070201 Satilla 03070202 Little Satilla 03070203 Cumberland-St. Simons 03070204 St. Marys 03070205 Nassau 0308 St. Johns 030801 St. Johns 03080101 Upper St. Johns 03080102 Oklawaha 03080103 Lower St. Johns 030802 East Florida Coastal 03080201 Daytona - St. 
Augustine 03080202 Cape Canaveral 03080203 Vero Beach 0309 Southern Florida 030901 Kissimmee 03090101 Kissimmee 03090102 Northern Okeechobee Inflow 03090103 Western Okeechobee Inflow 030902 Southern Florida 03090201 Lake Okeechobee 03090202 Everglades 03090203 Florida Bay-Florida Keys 03090204 Big Cypress Swamp 03090205 Caloosahatchee 0310 Peace-Tampa Bay 031001 Peace 03100101 Peace 03100102 Myakka 03100103 Charlotte Harbor 031002 Tampa Bay 03100201 Sarasota Bay 03100202 Manatee 03100203 Little Manatee 03100204 Alafia 03100205 Hillsborough 03100206 Tampa Bay 03100207 Crystal-Pithlachascotee 03100208 Withlacoochee 0311 Suwannee 031101 Aucilla-Waccasassa 03110101 Waccasassa 03110102 Econfina-Steinhatchee 03110103 Aucilla 031102 Suwannee 03110201 Upper Suwannee 03110202 Alapaha 03110203 withlacoochee 03110204 Little 03110205 Lower Suwannee 03110206 Santa Fe 0312 Ochlockonee 031200 Ochlockonee. Georgia 03120001 Apalachee Bay-St. Marks 03120002 Upper Ochlockonee 03120003 Lower Ochlockonee 0313 Apalachicola 031300 Apalachicola 03130001 Upper Chattahoochee 03130002 Middle Chattahoochee-Lake Harding 03130003 Middle Chattahoochee-Walter F. George Reservoir 03130004 Lower Chattahoochee 03130005 Upper Flint 03130006 Middle Flint 03130007 Kinchafoonee-Muckalee 03130008 Lower Flint 03130009 Ichawaynochaway 03130010 Spring 03130011 Apalachicola 03130012 Chipola 03130013 New 03130014 Apalachicola Bay 0314 Choctawhatchee - Escambia 031401 Florida Panhandle Coastal 03140101 St. Andrew-St. 
Joseph Bays 03140102 Choctawhatchee Bay 03140103 Yellow 03140104 Blackwater 03140105 Pensacola Bay 03140106 Perdido 03140107 Perdido Bay 031402 Choctawhatchee 03140201 Upper Choctawhatchee 03140202 Pea 03140203 Lower Choctawhatchee 031403 Escambia 03140301 Upper Conecuh 03140302 Patsaliga 03140303 Sepulga 03140304 Lower Conecuh 03140305 Escambia 0315 Alabama 031501 Coosa-Tallapoosa 03150101 Conasauga 03150102 Coosawattee 03150103 Oostanaula 03150104 Etowah 03150105 Upper Coosa 03150106 Middle Coosa 03150107 Lower Coosa 03150108 Upper Tallapoosa 03150109 Middle Tallapoosa 03150110 Lower Tallapoosa 031502 Alabama 03150201 Upper Alabama 03150202 Cahaba 03150203 Middle Alabama 03150204 Lower Alabama 0316 Mobile - Tombigbee 031601 Black Warrior - Tombigbee 03160101 Upper Tombigbee 03160102 Town 03160103 Buttahatchee 03160104 Tibbee 03160105 Luxapallila 03160106 Middle Tombigbee-Lubbub 03160107 Sipsey 03160108 Noxubee 03160109 Mulberry 03160110 Sipsey Fork 03160111 Locust 03160112 Upper Black Warrior 03160113 Lower Black Warrior 031602 Mobile Bay- Tombigbee 03160201 Middle Tombigbee-Chickasaw 03160202 Sucarnoochee 03160203 Lower Tambigbee 03160204 Mobile - Tensaw 03160205 Mobile Bay 0317 Pascagoula 031700 Pascagoula. Mississippi 03170001 Chunky-Okatibbee 03170002 Upper Chickasawhay 03170003 Lower Chickasawhay 03170004 Upper Leaf 03170005 Lower Leaf 03170006 Pascagoula 03170007 Black 03170008 Escatawpa 03170009 Mississippi Coastal 0318 Pearl 031800 Pearl 03180001 Upper Pearl 03180002 Middle Pearl-Strong 03180003 Middle Pearl-Silver 03180004 Lower Pearl. Mississippi 03180005 Bogue Chitto 04 Great Lakes 0401 Western Lake Superior 040101 Northwestern Lake Superior 04010101 Baptism-Brule 04010102 Beaver-Lester 040102 St. Louis 04010201 St. 
Louis 04010202 Cloquet 040103 Southwestern Lake Superior 04010301 Beartrap-Nemadji 04010302 Bad-Montreal 0402 Southern Lake Superior-Lake Superior 040201 Southcentral Lake Superior 04020101 Black-Presque Isle 04020102 Ontonagon 04020103 Keweenaw Peninsula 04020104 Sturgeon 04020105 Dead-Kelsey 040202 Southeastern Lake Superior 04020201 Betsy-Chocolay 04020202 Tahquamenon 04020203 Waiska 040203 Lake Superior 04020300 Lake Superior 0403 Northwestern Lake Michigan 040301 Northwestern Lake Michigan 04030101 Manitowoc-Sheboygan 04030102 Door-Kewaunee 04030103 Duck-Pensaukee 04030104 Oconto 04030105 Peshtigo 04030106 Brule 04030107 Michigamme 04030108 Menominee 04030109 Cedar-Ford 04030110 Escanaba 04030111 Tacoosh-Whitefish 04030112 Fishdam-Sturgeon 040302 Fox 04030201 Upper Fox 04030202 Wolf 04030203 Lake Winnebago 04030204 Lower Fox 0404 Southwestern Lake Michigan 040400 Southwestern Lake Michigan 04040001 Little Calumet-Galien 04040002 Pike-Root 04040003 Milwaukee 0405 Southeastern Lake Michigan 040500 Southeastern Lake Michigan 04050001 St. Joseph 04050002 Black-Macatawa 04050003 Kalamazoo 04050004 Upper Grand 04050005 Maple 04050006 Lower Grand 04050007 Thornapple 0406 Northeastern Lake Michigan-Lake Michigan 040601 Northeastern Lake Michigan 04060101 Pere Marquette-White 04060102 Muskegon 04060103 Manistee 04060104 Betsie-Platte 04060105 Boardman-Charlevoix 04060106 Manistique 04060107 Brevoort-Millecoquins 040602 Lake Michigan 04060200 Lake Michigan 0407 Northwestern Lake Huron 040700 Northwestern Lake Huron 04070001 St. 
Marys 04070002 Carp-Pine 04070003 Lone Lake-Ocqueoc 04070004 Cheboygan 04070005 Black 04070006 Thunder Bay 04070007 Au Sable 0408 Southwestern Lake Huron-Lake Huron 040801 Southwestern Lake Huron 04080101 Au Gres-Rifle 04080102 Kawkawlin-Pine 04080103 Pigeon-Wiscoggin 04080104 Birch-Willow 040802 Saginaw 04080201 Tittabawassee 04080202 Pine 04080203 Shiawassee 04080204 Flint 04080205 Cass 04080206 Saginaw 040803 Lake Huron 04080300 Lake Huron 0409 St. Clair-Detroit 040900 St. Clair-Detroit 04090001 St. Clair 04090002 Lake St. Clair 04090003 Clinton 04090004 Detroit 04090005 Huron 0410 Western Lake Erie 041000 Western Lake Erie 04100001 Ottawa-Stony 04100002 Raisin 04100003 St. Joseph 04100004 St. Marys 04100005 Upper Maumee 04100006 Tiffin 04100007 Auglaize 04100008 Blanchard 04100009 Lower Maumee 04100010 Cedar-Portage 04100011 Sandusky 04100012 Huron-Vermilion 0411 Southern Lake Erie 041100 Southern Lake Erie 04110001 Black-Rocky 04110002 Cuyahoga 04110003 Ashtabula-Chagrin 04110004 Grand 0412 Eastern Lake Erie-Lake Erie 041201 Eastern Lake Erie 04120101 Chautauqua-Conneaut 04120102 Cattaraugus 04120103 Buffalo-Eighteenmile 04120104 Niagara 041202 Lake Erie 04120200 Lake Erie 0413 Southwestern Lake Ontario 041300 Southwestern Lake Ontario 04130001 Oak Orchard-Twelvemile 04130002 Upper Genesee 04130003 Lower Genesee 0414 Southeastern Lake Ontario 041401 Southeastern Lake Ontario 04140101 Irondequoit-Ninemile 04140102 Salmon-Sandy 041402 Oswego 04140201 Seneca 04140202 Oneida 04140203 Oswego 0415 Northeastern Lake Ontario-Lake Ontario-St. Lawrence 041501 Northeastern Lake Ontario 04150101 Black 04150102 Chaumont-Perch 041502 Lake Ontario 04150200 Lake Ontario 041503 St. Lawrence 04150301 Upper St. Lawrence 04150302 Oswegatchie 04150303 Indian 04150304 Grass 04150305 Raquette 04150306 St. 
Regis 04150307 English-Salmon 05 Ohio 0501 Allegheny 050100 Allegheny 05010001 Upper Allegheny 05010002 Conewango 05010003 Middle Allegheny-Tionesta 05010004 French 05010005 Clarion 05010006 Middle Allegheny-Redbank 05010007 Conemaugh 05010008 Kiskiminetas 05010009 Lower Allegheny 0502 Monongahela 050200 Monongahela 05020001 Tygart Valley 05020002 West Fork 05020003 Upper Monongahela 05020004 Cheat 05020005 Lower Monongahela 05020006 Youghiogheny 0503 Upper Ohio 050301 Upper Ohio-Beaver 05030101 Upper Ohio 05030102 Shenango 05030103 Mahoning 05030104 Beaver 05030105 Connoquenessing 05030106 Upper Ohio-Wheeling 050302 Upper Ohio-Little Kanawha 05030201 Little Muskingum-Middle Island 05030202 Upper Ohio-Shade 05030203 Little Kanawha 05030204 Hocking 0504 Muskingum 050400 Muskingum 05040001 Tuscarawas 05040002 Mohican 05040003 Walhonding 05040004 Muskingum 05040005 Wills 05040006 Licking 0505 Kanawha 050500 Kanawha 05050001 Upper New 05050002 Middle New 05050003 Greenbrier 05050004 Lower New 05050005 Gauley 05050006 Upper Kanawha 05050007 Elk 05050008 Lower Kanawha 05050009 Coal 0506 Scioto 050600 Scioto 05060001 Upper Scioto 05060002 Lower Scioto 05060003 Paint 0507 Big Sandy-Guyandotte 050701 Guyandotte 05070101 Upper Guyandotte 05070102 Lower Guyandotte 050702 Big Sandy 05070201 Tug 05070202 Upper Levisa 05070203 Lower Levisa 05070204 Big Sandy 0508 Great Miami 050800 Great Miami 05080001 Upper Great Miami 05080002 Lower Great Miami 05080003 Whitewater 0509 Middle Ohio 050901 Middle Ohio-Raccoon 05090101 Raccoon-Symmes 05090102 Twelvepole 05090103 Little Scioto-Tygarts 05090104 Little Sandy 050902 Middle Ohio-Little Miami 05090201 Ohio Brush-Whiteoak 05090202 Little Miami 05090203 Middle Ohio-Laughery 0510 Kentucky-Licking 051001 Licking 05100101 Licking 05100102 South Fork Licking 051002 Kentucky 05100201 North Fork Kentucky 05100202 Middle Fork Kentucky 05100203 South Fork Kentucky 05100204 Upper Kentucky 05100205 Lower Kentucky 0511 Green 051100 Green 05110001 
Upper Green 05110002 Barren 05110003 Middle Green 05110004 Rough 05110005 Lower Green 05110006 Pond 0512 Wabash 051201 Wabash 05120101 Upper Wabash 05120102 Salamonie 05120103 Mississinewa 05120104 Eel 05120105 Middle Wabash-Deer 05120106 Tippecanoe 05120107 Wildcat 05120108 Middle Wabash-Little Vermilion 05120109 Vermilion 05120110 Sugar 05120111 Middle Wabash-Busseron 05120112 Embarras 05120113 Lower Wabash 05120114 Little Wabash 05120115 Skillet 051202 Patoka-White 05120201 Upper White 05120202 Lower White 05120203 Eel 05120204 Driftwood 05120205 Flatrock-Haw 05120206 Upper East Fork White 05120207 Muscatatuck 05120208 Lower East Fork White 05120209 Patoka 0513 Cumberland 051301 Upper Cumberland 05130101 Upper Cumberland 05130102 Rockcastle 05130103 Upper Cumberland-Lake Cumberland 05130104 South Fork Cumberland 05130105 Obey 05130106 Upper Cumberland-Cordell Hull 05130107 Collins 05130108 Caney 051302 Lower Cumberland 05130201 Lower Cumberland-Old Hickory Lake 05130202 Lower Cumberland-Sycamore 05130203 Stones 05130204 Harpeth 05130205 Lower Cumberland 05130206 Red 0514 Lower Ohio 051401 Lower Ohio-Salt 05140101 Silver-Little Kentucky 05140102 Salt 05140103 Rolling Fork 05140104 Blue-Sinking 051402 Lower Ohio 05140201 Lower Ohio-Little Pigeon 05140202 Highland-Pigeon 05140203 Lower Ohio-Bay 05140204 Saline 05140205 Tradewater 05140206 Lower Ohio 06 Tennessee 0601 Upper Tennessee 060101 French Broad-Holston 06010101 North Fork Holston 06010102 South Fork Holston 06010103 Watauga 06010104 Holston 06010105 Upper French Broad 06010106 Pigeon 06010107 Lower French Broad 06010108 Nolichucky 060102 Upper Tennessee 06010201 Watts Bar Lake 06010202 Upper Little Tennessee 06010203 Tuckasegee 06010204 Lower Little Tennessee 06010205 Upper Clinch 06010206 Powell 06010207 Lower Clinch 06010208 Emory 0602 Middle Tennessee-Hiwassee 060200 Middle Tennessee-Hiwassee 06020001 Middle Tennessee-Chickamauga 06020002 Hiwassee 06020003 Ocoee 06020004 Sequatchie 0603 Middle 
Tennessee-Elk 060300 Middle Tennessee-Elk 06030001 Guntersville Lake 06030002 Wheeler Lake 06030003 Upper Elk 06030004 Lower Elk 06030005 Pickwick Lake 06030006 Bear 0604 Lower Tennessee 060400 Lower Tennessee 06040001 Lower Tennessee-Beech 06040002 Upper Duck 06040003 Lower Duck 06040004 Buffalo 06040005 Kentucky Lake 06040006 Lower Tennessee 07 Upper Mississippi 0701 Mississippi Headwaters 070101 Mississippi Headwaters 07010101 Mississippi Headwaters 07010102 Leech Lake 07010103 Prairie-Willow 07010104 Elk-Nokasippi 07010105 Pine 07010106 Crow Wing 07010107 Redeye 07010108 Long Prairie 070102 Upper Mississippi-Crow-Rum 07010201 Platte-Spunk 07010202 Sauk 07010203 Clearwater-Elk 07010204 Crow 07010205 South Fork Crow 07010206 Twin Cities 07010207 Rum 0702 Minnesota 070200 Minnesota 07020001 Upper Minnesota 07020002 Pomme De Terre 07020003 Lac Qui Parle 07020004 Hawk-Yellow Medicine 07020005 Chippewa 07020006 Redwood 07020007 Middle Minnesota 07020008 Cottonwood 07020009 Blue Earth 07020010 Watonwan 07020011 Le Sueur 07020012 Lower Minnesota 0703 St. Croix 070300 St. Croix 07030001 Upper St. Croix 07030002 Namekagon 07030003 Kettle 07030004 Snake 07030005 Lower St. 
Croix 0704 Upper Mississippi-Black-Root 070400 Upper Mississippi-Black-Root 07040001 Rush-Vermillion 07040002 Cannon 07040003 Buffalo-Whitewater 07040004 Zumbro 07040005 Trempealeau 07040006 La Crosse-Pine 07040007 Black 07040008 Root 0705 Chippewa 070500 Chippewa 07050001 Upper Chippewa 07050002 Flambeau 07050003 South Fork Flambeau 07050004 Jump 07050005 Lower Chippewa 07050006 Eau Claire 07050007 Red Cedar 0706 Upper Mississippi-Maquoketa-Plum 070600 Upper Mississippi-Maquoketa-Plum 07060001 Coon-Yellow 07060002 Upper Iowa 07060003 Grant-Little Maquoketa 07060004 Turkey 07060005 Apple-Plum 07060006 Maquoketa 0707 Wisconsin 070700 Wisconsin 07070001 Upper Wisconsin 07070002 Lake Dubay 07070003 Castle Rock 07070004 Baraboo 07070005 Lower Wisconsin 07070006 Kickapoo 0708 Upper Mississippi-Iowa-Skunk-Wapsipinicon 070801 Upper Mississippi-Skunk-Wapsipinicon 07080101 Copperas-Duck 07080102 Upper Wapsipinicon 07080103 Lower Wapsipinicon 07080104 Flint-Henderson 07080105 South Skunk 07080106 North Skunk 07080107 Skunk 070802 Iowa 07080201 Upper Cedar 07080202 Shell Rock 07080203 Winnebago 07080204 West Fork Cedar 07080205 Middle Cedar 07080206 Lower Cedar 07080207 Upper Iowa 07080208 Middle Iowa 07080209 Lower Iowa 0709 Rock 070900 Rock 07090001 Upper Rock 07090002 Crawfish 07090003 Pecatonica 07090004 Sugar 07090005 Lower Rock 07090006 Kishwaukee 07090007 Green 0710 Des Moines 071000 Des Moines 07100001 Des Moines Headwaters 07100002 Upper Des Moines 07100003 East Fork Des Moines 07100004 Middle Des Moines 07100005 Boone 07100006 North Raccoon 07100007 South Raccoon 07100008 Lake Red Rock 07100009 Lower Des Moines 0711 Upper Mississippi-Salt 071100 Upper Mississippi-Salt 07110001 Bear-Wyaconda 07110002 North Fabius 07110003 South Fabius 07110004 The Sny 07110005 North Fork Salt 07110006 South Fork Salt 07110007 Salt 07110008 Cuivre 07110009 Peruque-Piasa 0712 Upper Illinois 071200 Upper Illinois 07120001 Kankakee 07120002 Iroquois 07120003 Chicago 07120004 Des Plaines 
07120005 Upper Illinois 07120006 Upper Fox 07120007 Lower Fox 0713 Lower Illinois 071300 Lower Illinois 07130001 Lower Illinois-Senachwine Lake 07130002 Vermilion 07130003 Lower Illinois-Lake Chautauqua 07130004 Mackinaw 07130005 Spoon 07130006 Upper Sangamon 07130007 South Fork Sangamon 07130008 Lower Sangamon 07130009 Salt 07130010 La Moine 07130011 Lower Illinois 07130012 Macoupin 0714 Upper Mississippi-Kaskaskia-Meramec 071401 Upper Mississippi-Meramec 07140101 Cahokia-Joachim 07140102 Meramec 07140103 Bourbeuse 07140104 Big 07140105 Upper Mississippi-Cape Girardeau 07140106 Big Muddy 07140107 Whitewater 07140108 Cache 071402 Kaskaskia 07140201 Upper Kaskaskia 07140202 Middle Kaskaskia 07140203 Shoal 07140204 Lower Kaskaskia 08 Lower Mississippi 0801 Lower Mississippi-Hatchie 080101 Lower Mississippi-Memphis 08010100 Lower Mississippi-Memphis 080102 Hatchie-Obion 08010201 Bayou De Chien-Mayfield 08010202 Obion 08010203 South Fork Obion 08010204 North Fork Forked Deer 08010205 South Fork Forked Deer 08010206 Forked Deer 08010207 Upper Hatchie 08010208 Lower Hatchie 08010209 Loosahatchie 08010210 Wolf 08010211 Horn Lake-Nonconnah 0802 Lower Mississippi - St. Francis 080201 Lower Mississippi-Helena 08020100 Lower Mississippi-Helena 080202 St. Francis 08020201 New Madrid-St. Johns 08020202 Upper St. Francis 08020203 Lower St. 
Francis 08020204 Little River Ditches 08020205 L'anguille 080203 Lower White 08020301 Lower White-Bayou Des Arc 08020302 Cache 08020303 Lower White 08020304 Big 080204 Lower Arkansas 08020401 Lower Arkansas 08020402 Bayou Meto 0803 Lower Mississippi - Yazoo 080301 Lower Mississippi-Greenville 08030100 Lower Mississippi-Greenville 080302 Yazoo 08030201 Little Tallahatchie 08030202 Tallahatchie 08030203 Yocona 08030204 Coldwater 08030205 Yalobusha 08030206 Upper Yazoo 08030207 Big Sunflower 08030208 Lower Yazoo 08030209 Deer-Steele 0804 Lower Red - Ouachita 080401 Upper Ouachita 08040101 Ouachita Headwaters 08040102 Upper Ouachita 08040103 Little Missouri 080402 Lower Ouachita 08040201 Lower Ouachita-Smackover 08040202 Lower Ouachita-Bayou De Loutre 08040203 Upper Saline 08040204 Lower Saline 08040205 Bayou Bartholomew 08040206 Bayou D'arbonne 08040207 Lower Ouachita 080403 Lower Red 08040301 Lower Red 08040302 Castor 08040303 Dugdemona 08040304 Little 08040305 Black 08040306 Bayou Cocodrie 0805 Boeuf-Tensas 080500 Boeuf-Tensas 08050001 Boeuf 08050002 Bayou Macon 08050003 Tensas 0806 Lower Mississippi - Big Black 080601 Lower Mississippi-Natchez 08060100 Lower Mississippi-Natchez 080602 Big Black - Homochitto 08060201 Upper Big Black 08060202 Lower Big Black 08060203 Bayou Pierre 08060204 Coles Creek 08060205 Homochitto 08060206 Buffalo 0807 Lower Mississippi-Lake Maurepas 080701 Lower Mississippi-Baton Rouge 08070100 Lower Mississippi-Baton Rouge 080702 Lake Maurepas 08070201 Bayou Sara-Thompson 08070202 Amite 08070203 Tickfaw 08070204 Lake Maurepas 08070205 Tangipahoa 080703 Lower Grand 08070300 Lower Grand 0808 Louisiana Coastal 080801 Atchafalaya - Vermilion 08080101 Atchafalaya 08080102 Bayou Teche 08080103 Vermilion 080802 Calcasieu - Mermentau 08080201 Mermentau Headwaters 08080202 Mermentau 08080203 Upper Calcasieu 08080204 Whisky Chitto 08080205 West Fork Calcasieu 08080206 Lower Calcasieu 0809 Lower Mississippi 080901 Lower Mississippi-New Orleans 08090100 
Lower Mississippi-New Orleans 080902 Lake Pontchartrain 08090201 Liberty Bayou-Tchefuncta 08090202 Lake Pontchartrain 08090203 Eastern Louisiana Coastal 080903 Central Louisiana Coastal 08090301 East Central Louisiana Coastal 08090302 West Central Louisiana Coastal 09 Souris-Red-Rainy 0901 Souris 090100 Souris 09010001 Upper Souris 09010002 Des Lacs 09010003 Lower Souris 09010004 Willow 09010005 Deep 0902 Red 090201 Upper Red 09020101 Bois De Sioux 09020102 Mustinka 09020103 Otter Tail 09020104 Upper Red 09020105 Western Wild Rice 09020106 Buffalo 09020107 Elm-Marsh 09020108 Eastern Wild Rice 09020109 Goose 090202 Devils Lake-Sheyenne 09020201 Devils Lake 09020202 Upper Sheyenne 09020203 Middle Sheyenne 09020204 Lower Sheyenne 09020205 Maple 090203 Lower Red 09020301 Sandhill-Wilson 09020302 Red Lakes 09020303 Red Lake 09020304 Thief 09020305 Clearwater 09020306 Grand Marais-Red 09020307 Turtle 09020308 Forest 09020309 Snake 09020310 Park 09020311 Lower Red 09020312 Two Rivers 09020313 Pembina 09020314 Roseau 0903 Rainy 090300 Rainy 09030001 Rainy Headwaters 09030002 Vermilion 09030003 Rainy Lake 09030004 Upper Rainy 09030005 Little Fork 09030006 Big Fork 09030007 Rapid 09030008 Lower Rainy 09030009 Lake of the Woods 10 Missouri 1001 Saskatchewan 100100 Saskatchewan 10010001 Belly 10010002 St. 
Mary 1002 Missouri Headwaters 100200 Missouri Headwaters 10020001 Red Rock 10020002 Beaverhead 10020003 Ruby 10020004 Big Hole 10020005 Jefferson 10020006 Boulder 10020007 Madison 10020008 Gallatin 1003 Missouri-Marias 100301 Upper Missouri 10030101 Upper Missouri 10030102 Upper Missouri-Dearborn 10030103 Smith 10030104 Sun 10030105 Belt 100302 Marias 10030201 Two Medicine 10030202 Cut Bank 10030203 Marias 10030204 Willow 10030205 Teton 1004 Missouri-Musselshell 100401 Fort Peck Lake 10040101 Bullwhacker-Dog 10040102 Arrow 10040103 Judith 10040104 Fort Peck Reservoir 10040105 Big Dry 10040106 Little Dry 100402 Musselshell 10040201 Upper Musselshell 10040202 Middle Musselshell 10040203 Flatwillow 10040204 Box Elder 10040205 Lower Musselshell 1005 Milk 100500 Milk 10050001 Milk Headwaters 10050002 Upper Milk 10050003 Wild Horse Lake 10050004 Middle Milk 10050005 Big Sandy 10050006 Sage 10050007 Lodge 10050008 Battle 10050009 Peoples 10050010 Cottonwood 10050011 Whitewater 10050012 Lower Milk 10050013 Frenchman 10050014 Beaver 10050015 Rock 10050016 Porcupine 1006 Missouri-Poplar 100600 Missouri-Poplar 10060001 Prairie Elk-Wolf 10060002 Redwater 10060003 Poplar 10060004 West Fork Poplar 10060005 Charlie-Little Muddy 10060006 Big Muddy 10060007 Brush Lake closed basin 1007 Upper Yellowstone 100700 Upper Yellowstone 10070001 Yellowstone Headwaters 10070002 Upper Yellowstone 10070003 Shields 10070004 Upper Yellowstone-Lake Basin 10070005 Stillwater 10070006 Clarks Fork Yellowstone 10070007 Upper Yellowstone-Pompeys Pillar 10070008 Pryor 1008 Big Horn 100800 Big Horn 10080001 Upper Wind 10080002 Little Wind 10080003 Popo Agie 10080004 Muskrat 10080005 Lower Wind 10080006 Badwater 10080007 Upper Bighorn 10080008 Nowood 10080009 Greybull 10080010 Big Horn Lake 10080011 Dry 10080012 North Fork Shoshone 10080013 South Fork Shoshone 10080014 Shoshone 10080015 Lower Bighorn 10080016 Little Bighorn 1009 Powder-Tongue 100901 Tongue 10090101 Upper Tongue 10090102 Lower Tongue
100902 Powder 10090201 Middle Fork Powder 10090202 Upper Powder 10090203 South Fork Powder 10090204 Salt 10090205 Crazy Woman 10090206 Clear 10090207 Middle Powder 10090208 Little Powder 10090209 Lower Powder 10090210 Mizpah 1010 Lower Yellowstone 101000 Lower Yellowstone 10100001 Lower Yellowstone-Sunday 10100002 Big Porcupine 10100003 Rosebud 10100004 Lower Yellowstone 10100005 O'fallon 1011 Missouri-Little Missouri 101101 Lake Sakakawea 10110101 Lake Sakakawea 10110102 Little Muddy 101102 Little Missouri 10110201 Upper Little Missouri 10110202 Boxelder 10110203 Middle Little Missouri 10110204 Beaver 10110205 Lower Little Missouri 1012 Cheyenne 101201 Cheyenne 10120101 Antelope 10120102 Dry Fork Cheyenne 10120103 Upper Cheyenne 10120104 Lance 10120105 Lightning 10120106 Angostura Reservoir 10120107 Beaver 10120108 Hat 10120109 Middle Cheyenne-Spring 10120110 Rapid 10120111 Middle Cheyenne-Elk 10120112 Lower Cheyenne 10120113 Cherry 101202 Belle Fourche 10120201 Upper Belle Fourche 10120202 Lower Belle Fourche 10120203 Redwater 1013 Missouri-Oahe 101301 Lake Oahe 10130101 Painted Woods-Square Butte 10130102 Upper Lake Oahe 10130103 Apple 10130104 Beaver 10130105 Lower Lake Oahe 10130106 West Missouri Coteau 101302 Cannonball-Heart-Knife 10130201 Knife 10130202 Upper Heart 10130203 Lower Heart 10130204 Upper Cannonball 10130205 Cedar 10130206 Lower Cannonball 101303 Grand-Moreau 10130301 North Fork Grand 10130302 South Fork Grand 10130303 Grand 10130304 South Fork Moreau 10130305 Upper Moreau 10130306 Lower Moreau 1014 Missouri-White 101401 Fort Randall Reservoir 10140101 Fort Randall Reservoir 10140102 Bad 10140103 Medicine Knoll 10140104 Medicine 10140105 Crow 101402 White 10140201 Upper White 10140202 Middle White 10140203 Little White 10140204 Lower White 1015 Niobrara 101500 Niobrara 10150001 Ponca 10150002 Niobrara Headwaters 10150003 Upper Niobrara 10150004 Middle Niobrara 10150005 Snake 10150006 Keya Paha 10150007 Lower Niobrara 1016 James 101600 James 
10160001 James Headwaters 10160002 Pipestem 10160003 Upper James 10160004 Elm 10160005 Mud 10160006 Middle James 10160007 East Missouri Coteau 10160008 Snake 10160009 Turtle 10160010 North Big Sioux Coteau 10160011 Lower James 1017 Missouri-Big Sioux 101701 Lewis and Clark Lake 10170101 Lewis and Clark Lake 10170102 Vermillion 10170103 South Big Sioux Coteau 101702 Big Sioux 10170201 Middle Big Sioux Coteau 10170202 Upper Big Sioux 10170203 Lower Big Sioux 10170204 Rock 1018 North Platte 101800 North Platte 10180001 North Platte Headwaters 10180002 Upper North Platte 10180003 Pathfinder-Seminoe Reservoirs 10180004 Medicine Bow 10180005 Little Medicine Bow 10180006 Sweetwater 10180007 Middle North Platte-Casper 10180008 Glendo Reservoir 10180009 Middle North Platte-Scotts Bluff 10180010 Upper Laramie 10180011 Lower Laramie 10180012 Horse 10180013 Pumpkin 10180014 Lower North Platte 1019 South Platte 101900 South Platte 10190001 South Platte Headwaters 10190002 Upper South Platte 10190003 Middle South Platte-Cherry Creek 10190004 Clear 10190005 St. 
Vrain 10190006 Big Thompson 10190007 Cache La Poudre 10190008 Lone Tree-Owl 10190009 Crow 10190010 Kiowa 10190011 Bijou 10190012 Middle South Platte-Sterling 10190013 Beaver 10190014 Pawnee 10190015 Upper Lodgepole 10190016 Lower Lodgepole 10190017 Sidney Draw 10190018 Lower South Platte 1020 Platte 102001 Middle Platte 10200101 Middle Platte-Buffalo 10200102 Wood 10200103 Middle Platte-Prairie 102002 Lower Platte 10200201 Lower Platte-Shell 10200202 Lower Platte 10200203 Salt 1021 Loup 102100 Loup 10210001 Upper Middle Loup 10210002 Dismal 10210003 Lower Middle Loup 10210004 South Loup 10210005 Mud 10210006 Upper North Loup 10210007 Lower North Loup 10210008 Calamus 10210009 Loup 10210010 Cedar 1022 Elkhorn 102200 Elkhorn 10220001 Upper Elkhorn 10220002 North Fork Elkhorn 10220003 Lower Elkhorn 10220004 Logan 1023 Missouri-Little Sioux 102300 Missouri-Little Sioux 10230001 Blackbird-Soldier 10230002 Floyd 10230003 Little Sioux 10230004 Monona-Harrison Ditch 10230005 Maple 10230006 Big Papillion-Mosquito 10230007 Boyer 1024 Missouri-Nishnabotna 102400 Missouri-Nishnabotna 10240001 Keg-Weeping Water 10240002 West Nishnabotna 10240003 East Nishnabotna 10240004 Nishnabotna 10240005 Tarkio-Wolf 10240006 Little Nemaha 10240007 South Fork Big Nemaha 10240008 Big Nemaha 10240009 West Nodaway 10240010 Nodaway 10240011 Independence-Sugar 10240012 Platte 10240013 One Hundred and Two 1025 Republican 102500 Republican 10250001 Arikaree 10250002 North Fork Republican 10250003 South Fork Republican 10250004 Upper Republican 10250005 Frenchman 10250006 Stinking Water 10250007 Red Willow 10250008 Medicine 10250009 Harlan County Reservoir 10250010 Upper Sappa 10250011 Lower Sappa 10250012 South Fork Beaver 10250013 Little Beaver 10250014 Beaver 10250015 Prairie Dog 10250016 Middle Republican 10250017 Lower Republican 1026 Smoky Hill 102600 Smoky Hill 10260001 Smoky Hill Headwaters 10260002 North Fork Smoky Hill 10260003 Upper Smoky Hill 10260004 Ladder 10260005 Hackberry 10260006 
Middle Smoky Hill 10260007 Big 10260008 Lower Smoky Hill 10260009 Upper Saline 10260010 Lower Saline 10260011 Upper North Fork Solomon 10260012 Lower North Fork Solomon 10260013 Upper South Fork Solomon 10260014 Lower South Fork Solomon 10260015 Solomon 1027 Kansas 102701 Kansas 10270101 Upper Kansas 10270102 Middle Kansas 10270103 Delaware 10270104 Lower Kansas 102702 Big Blue 10270201 Upper Big Blue 10270202 Middle Big Blue 10270203 West Fork Big Blue 10270204 Turkey 10270205 Lower Big Blue 10270206 Upper Little Blue 10270207 Lower Little Blue 1028 Chariton-Grand 102801 Grand 10280101 Upper Grand 10280102 Thompson 10280103 Lower Grand 102802 Chariton 10280201 Upper Chariton 10280202 Lower Chariton 10280203 Little Chariton 1029 Gasconade-Osage 102901 Osage 10290101 Upper Marais Des Cygnes 10290102 Lower Marais Des Cygnes 10290103 Little Osage 10290104 Marmaton 10290105 Harry S. Missouri 10290106 Sac 10290107 Pomme De Terre 10290108 South Grand 10290109 Lake of the Ozarks 10290110 Niangua 10290111 Lower Osage 102902 Gasconade 10290201 Upper Gasconade 10290202 Big Piney 10290203 Lower Gasconade 1030 Lower Missouri 103001 Lower Missouri-Blackwater 10300101 Lower Missouri-Crooked 10300102 Lower Missouri-Moreau 10300103 Lamine 10300104 Blackwater 103002 Lower Missouri 10300200 Lower Missouri 11 Arkansas-White-Red 1101 Upper White 110100 Upper White 11010001 Beaver Reservoir 11010002 James 11010003 Bull Shoals Lake 11010004 Middle White 11010005 Buffalo 11010006 North Fork White 11010007 Upper Black 11010008 Current 11010009 Lower Black 11010010 Spring 11010011 Eleven Point 11010012 Strawberry 11010013 Upper White-Village 11010014 Little Red 1102 Upper Arkansas 110200 Upper Arkansas 11020001 Arkansas Headwaters 11020002 Upper Arkansas 11020003 Fountain 11020004 Chico 11020005 Upper Arkansas-Lake Meredith 11020006 Huerfano 11020007 Apishapa 11020008 Horse 11020009 Upper Arkansas-John Martin 11020010 Purgatoire 11020011 Big Sandy 11020012 Rush 11020013 Two Butte 1103 
Middle Arkansas 110300 Middle Arkansas 11030001 Middle Arkansas-Lake Mckinney 11030002 Whitewoman 11030003 Arkansas-Dodge City 11030004 Coon-Pickerel 11030005 Pawnee 11030006 Buckner 11030007 Upper Walnut Creek 11030008 Lower Walnut Creek 11030009 Rattlesnake 11030010 Gar-Peace 11030011 Cow 11030012 Little Arkansas 11030013 Middle Arkansas-Slate 11030014 North Fork Ninnescah 11030015 South Fork Ninnescah 11030016 Ninnescah 11030017 Upper Walnut River 11030018 Lower Walnut River 1104 Upper Cimarron 110400 Upper Cimarron 11040001 Cimarron headwaters 11040002 Upper Cimarron 11040003 North Fork Cimarron 11040004 Sand Arroyo 11040005 Bear 11040006 Upper Cimarron-Liberal 11040007 Crooked 11040008 Upper Cimarron-Bluff 1105 Lower Cimarron 110500 Lower Cimarron 11050001 Lower Cimarron-Eagle Chief 11050002 Lower Cimarron-Skeleton 11050003 Lower Cimarron 1106 Arkansas - Keystone 110600 Arkansas - Keystone 11060001 Kaw Lake 11060002 Upper Salt Fork Arkansas 11060003 Medicine Lodge 11060004 Lower Salt Fork Arkansas 11060005 Chikaskia 11060006 Black Bear-Red Rock 1107 Neosho - Verdigris 110701 Verdigris 11070101 Upper Verdigris 11070102 Fall 11070103 Middle Verdigris 11070104 Elk 11070105 Lower Verdigris 11070106 Caney 11070107 Bird 110702 Neosho 11070201 Neosho headwaters 11070202 Upper Cottonwood 11070203 Lower Cottonwood 11070204 Upper Neosho 11070205 Middle Neosho 11070206 Lake O' the Cherokees 11070207 Spring 11070208 Elk 11070209 Lower Neosho 1108 Upper Canadian 110800 Upper Canadian 11080001 Canadian headwaters 11080002 Cimarron 11080003 Upper Canadian 11080004 Mora 11080005 Conchas 11080006 Upper Canadian-Ute Reservoir 11080007 Ute 11080008 Revuelto 1109 Lower Canadian 110901 Middle Canadian 11090101 Middle Canadian-Trujillo 11090102 Punta De Agua 11090103 Rita Blanca 11090104 Carrizo 11090105 Lake Meredith 11090106 Middle Canadian-Spring 110902 Lower Canadian 11090201 Lower Canadian-Deer 11090202 Lower Canadian-Walnut 11090203 Little 11090204 Lower Canadian 1110 North 
Canadian 111001 Upper Beaver 11100101 Upper Beaver 11100102 Middle Beaver 11100103 Coldwater 11100104 Palo Duro 111002 Lower Beaver 11100201 Lower Beaver 11100202 Upper Wolf 11100203 Lower Wolf 111003 Lower North Canadian 11100301 Middle North Canadian 11100302 Lower North Canadian 11100303 Deep Fork 1111 Lower Arkansas 111101 Robert S. Kerr Reservoir 11110101 Polecat-Snake 11110102 Dirty-Greenleaf 11110103 Illinois 11110104 Robert S. Kerr Reservoir 11110105 Poteau 111102 Lower Arkansas-Fourche La Fave 11110201 Frog-Mulberry 11110202 Dardanelle Reservoir 11110203 Lake Conway-Point Remove 11110204 Petit Jean 11110205 Cadron 11110206 Fourche La Fave 11110207 Lower Arkansas-Maumelle 1112 Red headwaters 111201 Prairie Dog Town Fork Red 11120101 Tierra Blanca 11120102 Palo Duro 11120103 Upper Prairie Dog Town Fork Red 11120104 Tule 11120105 Lower Prairie Dog Town Fork Red 111202 Salt Fork Red 11120201 Upper Salt Fork Red 11120202 Lower Salt Fork Red 111203 North Fork Red 11120301 Upper North Fork Red 11120302 Middle North Fork Red 11120303 Lower North Fork Red 11120304 Elm Fork Red 1113 Red - Washita 111301 Red-Pease 11130101 Groesbeck-Sandy 11130102 Blue-China 11130103 North Pease 11130104 Middle Pease 11130105 Pease 111302 Red-Lake Texoma 11130201 Farmers-Mud 11130202 Cache 11130203 West Cache 11130204 North Wichita 11130205 South Wichita 11130206 Wichita 11130207 Southern Beaver 11130208 Northern Beaver 11130209 Little Wichita 11130210 Lake Texoma 111303 Washita 11130301 Washita headwaters 11130302 Upper Washita 11130303 Middle Washita 11130304 Lower Washita 1114 Red-Sulphur 111401 Red-Little 11140101 Bois D'arc-Island 11140102 Blue 11140103 Muddy Boggy 11140104 Clear Boggy 11140105 Kiamichi 11140106 Pecan-Waterhole 11140107 Upper Little 11140108 Mountain Fork 11140109 Lower Little 111402 Red-Saline 11140201 Mckinney-Posten Bayous 11140202 Middle Red-Coushatta 11140203 Loggy Bayou 11140204 Red Chute 11140205 Bodcau Bayou 11140206 Bayou Pierre 11140207 Lower Red-Lake 
Iatt 11140208 Saline Bayou 11140209 Black Lake Bayou 111403 Big Cypress - Sulphur 11140301 Sulphur headwaters 11140302 Lower Sulphur 11140303 White Oak Bayou 11140304 Cross Bayou 11140305 Lake O'the Pines 11140306 Caddo Lake 11140307 Little Cypress 12 Texas-Gulf 1201 Sabine 120100 Sabine 12010001 Upper Sabine 12010002 Middle Sabine 12010003 Lake Fork 12010004 Toledo Bend Reservoir 12010005 Lower Sabine 1202 Neches 120200 Neches 12020001 Upper Neches 12020002 Middle Neches 12020003 Lower Neches 12020004 Upper Angelina 12020005 Lower Angelina 12020006 Village 12020007 Pine Island Bayou 1203 Trinity 120301 Upper Trinity 12030101 Upper West Fork Trinity 12030102 Lower West Fork Trinity 12030103 Elm Fork Trinity 12030104 Denton 12030105 Upper Trinity 12030106 East Fork Trinity 12030107 Cedar 12030108 Richland 12030109 Chambers 120302 Lower Trinity 12030201 Lower Trinity-Tehuacana 12030202 Lower Trinity-Kickapoo 12030203 Lower Trinity 1204 Galveston Bay-San Jacinto 120401 San Jacinto 12040101 West Fork San Jacinto 12040102 Spring 12040103 East Fork San Jacinto 12040104 Buffalo-San Jacinto 120402 Galveston Bay-Sabine Lake 12040201 Sabine Lake 12040202 East Galveston Bay 12040203 North Galveston Bay 12040204 West Galveston Bay 12040205 Austin-Oyster 1205 Brazos headwaters 120500 Brazos headwaters 12050001 Yellow House Draw 12050002 Blackwater Draw 12050003 North Fork Double Mountain Fork 12050004 Double Mountain Fork Brazos 12050005 Running Water Draw 12050006 White 12050007 Salt Fork Brazos 1206 Middle Brazos 120601 Middle Brazos-Clear Fork 12060101 Middle Brazos-Millers 12060102 Upper Clear Fork Brazos 12060103 Paint 12060104 Lower Clear Fork Brazos 12060105 Hubbard 120602 Middle Brazos-Bosque 12060201 Middle Brazos-Palo Pinto 12060202 Middle Brazos-Lake Whitney 12060203 Bosque 12060204 North Bosque 1207 Lower Brazos 120701 Lower Brazos 12070101 Lower Brazos-Little Brazos 12070102 Yegua 12070103 Navasota 12070104 Lower Brazos 120702 Little 12070201 Leon 12070202 Cowhouse
12070203 Lampasas 12070204 Little 12070205 San Gabriel 1208 Upper Colorado 120800 Upper Colorado 12080001 Lost Draw 12080002 Colorado headwaters 12080003 Monument-Seminole Draws 12080004 Mustang Draw 12080005 Johnson Draw 12080006 Sulphur Springs Draw 12080007 Beals 12080008 Upper Colorado 1209 Lower Colorado-San Bernard Coastal 120901 Middle Colorado-Concho 12090101 Middle Colorado-Elm 12090102 South Concho 12090103 Middle Concho 12090104 North Concho 12090105 Concho 12090106 Middle Colorado 12090107 Pecan Bayou 12090108 Jim Ned 12090109 San Saba 12090110 Brady 120902 Middle Colorado-Llano 12090201 Buchanan-Lyndon B 12090202 North Llano 12090203 South Llano 12090204 Llano 12090205 Austin-Travis Lakes 12090206 Pedernales 120903 Lower Colorado 12090301 Lower Colorado-Cummins 12090302 Lower Colorado 120904 San Bernard Coastal 12090401 San Bernard 12090402 East Matagorda Bay 1210 Central Texas Coastal 121001 Lavaca 12100101 Lavaca 12100102 Navidad 121002 Guadalupe 12100201 Upper Guadalupe 12100202 Middle Guadalupe 12100203 San Marcos 12100204 Lower Guadalupe 121003 San Antonio 12100301 Upper San Antonio 12100302 Medina 12100303 Lower San Antonio 12100304 Cibolo 121004 Central Texas Coastal 12100401 Central Matagorda Bay 12100402 West Matagorda Bay 12100403 East San Antonio Bay 12100404 West San Antonio Bay 12100405 Aransas Bay 12100406 Mission 12100407 Aransas 1211 Nueces-Southwestern Texas Coastal 121101 Nueces 12110101 Nueces headwaters 12110102 West Nueces 12110103 Upper Nueces 12110104 Turkey 12110105 Middle Nueces 12110106 Upper Frio 12110107 Hondo 12110108 Lower Frio 12110109 San Miguel 12110110 Atascosa 12110111 Lower Nueces 121102 Southwestern Texas Coastal 12110201 North Corpus Christi Bay 12110202 South Corpus Christi Bay 12110203 North Laguna Madre 12110204 San Fernando 12110205 Baffin Bay 12110206 Palo Blanco 12110207 Central Laguna Madre 12110208 South Laguna Madre 13 Rio Grande 1301 Rio Grande headwaters 130100 Rio Grande headwaters 13010001 Rio Grande 
headwaters 13010002 Alamosa-Trinchera 13010003 San Luis 13010004 Saguache 13010005 Conejos 1302 Rio Grande-Elephant Butte 130201 Upper Rio Grande 13020101 Upper Rio Grande 13020102 Rio Chama 130202 Rio Grande-Elephant Butte 13020201 Rio Grande-Santa Fe 13020202 Jemez 13020203 Rio Grande-Albuquerque 13020204 Rio Puerco 13020205 Arroyo Chico 13020206 North Plains 13020207 Rio San Jose 13020208 Plains of San Agustin 13020209 Rio Salado 13020210 Jornada Del Muerto 13020211 Elephant Butte Reservoir 1303 Rio Grande-Mimbres 130301 Rio Grande-Caballo 13030101 Caballo 13030102 El Paso-Las Cruces 13030103 Jornada Draw 130302 Mimbres 13030201 Playas Lake 13030202 Mimbres 1304 Rio Grande-Amistad 130401 Rio Grande-Fort Quitman 13040100 Rio Grande-Fort Quitman 130402 Rio Grande-Amistad 13040201 Cibolo-Red Light 13040202 Alamito 13040203 Black Hills-Fresno 13040204 Terlingua 13040205 Big Bend 13040206 Maravillas 13040207 Santiago Draw 13040208 Reagan-Sanderson 13040209 San Francisco 13040210 Lozier Canyon 13040211 Big Canyon 13040212 Amistad Reservoir 130403 Devils 13040301 Upper Devils 13040302 Lower Devils 13040303 Dry Devils 1305 Rio Grande closed basins 130500 Rio Grande closed basins 13050001 Western Estancia 13050002 Eastern Estancia 13050003 Tularosa Valley 13050004 Salt Basin 1306 Upper Pecos 130600 Upper Pecos 13060001 Pecos headwaters 13060002 Pintada Arroyo 13060003 Upper Pecos 13060004 Taiban 13060005 Arroyo Del Macho 13060006 Gallo Arroyo 13060007 Upper Pecos-Long Arroyo 13060008 Rio Hondo 13060009 Rio Felix 13060010 Rio Penasco 13060011 Upper Pecos-Black 1307 Lower Pecos 130700 Lower Pecos 13070001 Lower Pecos-Red Bluff Reservoir 13070002 Delaware 13070003 Toyah 13070004 Salt Draw 13070005 Barrilla Draw 13070006 Coyanosa-Hackberry Draws 13070007 Landreth-Monument Draws 13070008 Lower Pecos 13070009 Tunas 13070010 Independence 13070011 Howard Draw 1308 Rio Grande-Falcon 130800 Rio Grande-Falcon 13080001 Elm-Sycamore 13080002 San Ambrosia-Santa Isabel 13080003 
International Falcon Reservoir 1309 Lower Rio Grande 130900 Lower Rio Grande 13090001 Los Olmos 13090002 Lower Rio Grande 14 Upper Colorado 1401 Colorado headwaters 140100 Colorado headwaters 14010001 Colorado headwaters 14010002 Blue 14010003 Eagle 14010004 Roaring Fork 14010005 Colorado headwaters-Plateau 14010006 Parachute-Roan 1402 Gunnison 140200 Gunnison 14020001 East-Taylor 14020002 Upper Gunnison 14020003 Tomichi 14020004 North Fork Gunnison 14020005 Lower Gunnison 14020006 Uncompahgre 1403 Upper Colorado-Dolores 140300 Upper Colorado-Dolores 14030001 Westwater Canyon 14030002 Upper Dolores 14030003 San Miguel 14030004 Lower Dolores 14030005 Upper Colorado-Kane Springs 1404 Great Divide - Upper Green 140401 Upper Green 14040101 Upper Green 14040102 New Fork 14040103 Upper Green-Slate 14040104 Big Sandy 14040105 Bitter 14040106 Upper Green-Flaming Gorge Reservoir 14040107 Blacks Fork 14040108 Muddy 14040109 Vermilion 140402 Great Divide closed basin 14040200 Great Divide closed basin 1405 White-Yampa 140500 White - Yampa 14050001 Upper Yampa 14050002 Lower Yampa 14050003 Little Snake 14050004 Muddy 14050005 Upper White 14050006 Piceance-Yellow 14050007 Lower White 1406 Lower Green 140600 Lower Green 14060001 Lower Green-Diamond 14060002 Ashley-Brush 14060003 Duchesne 14060004 Strawberry 14060005 Lower Green-Desolation Canyon 14060006 Willow 14060007 Price 14060008 Lower Green 14060009 San Rafael 1407 Upper Colorado-Dirty Devil 140700 Upper Colorado-Dirty Devil 14070001 Upper Lake Powell 14070002 Muddy 14070003 Fremont 14070004 Dirty Devil 14070005 Escalante 14070006 Lower Lake Powell 14070007 Paria 1408 San Juan 140801 Upper San Juan 14080101 Upper San Juan 14080102 Piedra 14080103 Blanco Canyon 14080104 Animas 14080105 Middle San Juan 14080106 Chaco 14080107 Mancos 140802 Lower San Juan 14080201 Lower San Juan-Four Corners 14080202 Mcelmo 14080203 Montezuma 14080204 Chinle 14080205 Lower San Juan 15 Lower Colorado 1501 Lower Colorado-Lake Mead 150100 Lower
Colorado-Lake Mead 15010001 Lower Colorado-Marble Canyon 15010002 Grand Canyon 15010003 Kanab 15010004 Havasu Canyon 15010005 Lake Mead 15010006 Grand Wash 15010007 Hualapai Wash 15010008 Upper Virgin 15010009 Fort Pierce Wash 15010010 Lower Virgin 15010011 White 15010012 Muddy 15010013 Meadow Valley Wash 15010014 Detrital Wash 15010015 Las Vegas Wash 1502 Little Colorado 150200 Little Colorado 15020001 Little Colorado headwaters 15020002 Upper Little Colorado 15020003 Carrizo Wash 15020004 Zuni 15020005 Silver 15020006 Upper Puerco 15020007 Lower Puerco 15020008 Middle Little Colorado 15020009 Leroux Wash 15020010 Chevelon Canyon 15020011 Cottonwood Wash 15020012 Corn-Oraibi 15020013 Polacca Wash 15020014 Jadito Wash 15020015 Canyon Diablo 15020016 Lower Little Colorado 15020017 Dinnebito Wash 15020018 Moenkopi Wash 1503 Lower Colorado 150301 Lower Colorado 15030101 Havasu-Mohave Lakes 15030102 Piute Wash 15030103 Sacramento Wash 15030104 Imperial Reservoir 15030105 Bouse Wash 15030106 Tyson Wash 15030107 Lower Colorado 15030108 Yuma Desert 150302 Bill Williams 15030201 Big Sandy 15030202 Burro 15030203 Santa Maria 15030204 Bill Williams 1504 Upper Gila 150400 Upper Gila 15040001 Upper Gila 15040002 Upper Gila-Mangas 15040003 Animas Valley 15040004 San Francisco 15040005 Upper Gila-San Carlos Reservoir 15040006 San Simon 15040007 San Carlos 1505 Middle Gila 150501 Middle Gila 15050100 Middle Gila 150502 San Pedro-Willcox 15050201 Willcox Playa 15050202 Upper San Pedro 15050203 Lower San Pedro 150503 Santa Cruz 15050301 Upper Santa Cruz 15050302 Rillito 15050303 Lower Santa Cruz 15050304 Brawley Wash 15050305 Aguirre Valley 15050306 Santa Rosa Wash 1506 Salt 150601 Salt 15060101 Black 15060102 White 15060103 Upper Salt 15060104 Carrizo 15060105 Tonto 15060106 Lower Salt 150602 Verde 15060201 Big Chino-Williamson Valley 15060202 Upper Verde 15060203 Lower Verde 1507 Lower Gila 150701 Lower Gila-Agua Fria 15070101 Lower Gila-Painted Rock Reservoir 15070102 Agua Fria 
15070103 Hassayampa 15070104 Centennial Wash 150702 Lower Gila 15070201 Lower Gila 15070202 Tenmile Wash 15070203 San Cristobal Wash 1508 Sonora 150801 Rio Sonoyta 15080101 San Simon Wash 15080102 Rio Sonoyta 15080103 Tule Desert 150802 Rio De La Concepcion 15080200 Rio De La Concepcion 150803 Rio De Bavispe 15080301 Whitewater Draw 15080302 San Bernardino Valley 15080303 Cloverdale 16 Great Basin 1601 Bear 160101 Upper Bear 16010101 Upper Bear 16010102 Central Bear 160102 Lower Bear 16010201 Bear Lake 16010202 Middle Bear 16010203 Little Bear-Logan 16010204 Lower Bear-Malad 1602 Great Salt Lake 160201 Weber 16020101 Upper Weber 16020102 Lower Weber 160202 Jordan 16020201 Utah Lake 16020202 Spanish Fork 16020203 Provo 16020204 Jordan 160203 Great Salt Lake 16020301 Hamlin-Snake Valleys 16020302 Pine Valley 16020303 Tule Valley 16020304 Rush-Tooele Valleys 16020305 Skull Valley 16020306 Southern Great Salt Lake Desert 16020307 Pilot-Thousand Springs 16020308 Northern Great Salt Lake Desert 16020309 Curlew Valley 16020310 Great Salt Lake 1603 Escalante Desert-Sevier Lake 160300 Escalante Desert-Sevier Lake 16030001 Upper Sevier 16030002 East Fork Sevier 16030003 Middle Sevier 16030004 San Pitch 16030005 Lower Sevier 16030006 Escalante Desert 16030007 Beaver Bottoms-Upper Beaver 16030008 Lower Beaver 16030009 Sevier Lake 1604 Black Rock Desert-Humboldt 160401 Humboldt 16040101 Upper Humboldt 16040102 North Fork Humboldt 16040103 South Fork Humboldt 16040104 Pine 16040105 Middle Humboldt 16040106 Rock 16040107 Reese 16040108 Lower Humboldt 16040109 Little Humboldt 160402 Black Rock Desert 16040201 Upper Quinn 16040202 Lower Quinn 16040203 Smoke Creek Desert 16040204 Massacre Lake 16040205 Thousand-Virgin 1605 Central Lahontan 160501 Truckee 16050101 Lake Tahoe 16050102 Truckee 16050103 Pyramid-Winnemucca Lakes 16050104 Granite Springs Valley 160502 Carson 16050201 Upper Carson 16050202 Middle Carson 16050203 Carson Desert 160503 Walker 16050301 East Walker 16050302 
West Walker 16050303 Walker 16050304 Walker Lake 1606 Central Nevada Desert Basins 160600 Central Nevada Desert Basins 16060001 Dixie Valley 16060002 Gabbs Valley 16060003 Southern Big Smoky Valley 16060004 Northern Big Smoky Valley 16060005 Diamond-Monitor Valleys 16060006 Little Smoky-Newark Valleys 16060007 Long-Ruby Valleys 16060008 Spring-Steptoe Valleys 16060009 Dry Lake Valley 16060010 Fish Lake-Soda Spring Valleys 16060011 Ralston-Stone Cabin Valleys 16060012 Hot Creek-Railroad Valleys 16060013 Cactus-Sarcobatus Flats 16060014 Sand Spring-Tikaboo Valleys 16060015 Ivanpah-Pahrump Valleys 17 Pacific Northwest 1701 Kootenai-Pend Oreille-Spokane 170101 Kootenai 17010101 Upper Kootenai 17010102 Fisher 17010103 Yaak 17010104 Lower Kootenai 17010105 Moyie 170102 Pend Oreille 17010201 Upper Clark Fork 17010202 Flint-Rock 17010203 Blackfoot 17010204 Middle Clark Fork 17010205 Bitterroot 17010206 North Fork Flathead 17010207 Middle Fork Flathead 17010208 Flathead Lake 17010209 South Fork Flathead 17010210 Stillwater 17010211 Swan 17010212 Lower Flathead 17010213 Lower Clark Fork 17010214 Pend Oreille Lake 17010215 Priest 17010216 Pend Oreille 170103 Spokane 17010301 Upper Coeur D'alene 17010302 South Fork Coeur D'alene 17010303 Coeur D'alene Lake 17010304 St. Joe 17010305 Upper Spokane 17010306 Hangman 17010307 Lower Spokane 17010308 Little Spokane 1702 Upper Columbia 170200 Upper Columbia 17020001 Franklin D. 
Roosevelt Lake 17020002 Kettle 17020003 Colville 17020004 Sanpoil 17020005 Chief Joseph 17020006 Okanogan 17020007 Similkameen 17020008 Methow 17020009 Lake Chelan 17020010 Upper Columbia-Entiat 17020011 Wenatchee 17020012 Moses Coulee 17020013 Upper Crab 17020014 Banks Lake 17020015 Lower Crab 17020016 Upper Columbia-Priest Rapids 1703 Yakima 170300 Yakima 17030001 Upper Yakima 17030002 Naches 17030003 Lower Yakima, Washington 1704 Upper Snake 170401 Snake headwaters 17040101 Snake headwaters 17040102 Gros Ventre 17040103 Greys-Hobock 17040104 Palisades 17040105 Salt 170402 Upper Snake 17040201 Idaho Falls 17040202 Upper Henrys 17040203 Lower Henrys 17040204 Teton 17040205 Willow 17040206 American Falls 17040207 Blackfoot 17040208 Portneuf 17040209 Lake Walcott 17040210 Raft 17040211 Goose 17040212 Upper Snake-Rock 17040213 Salmon Falls 17040214 Beaver-Camas 17040215 Medicine Lodge 17040216 Birch 17040217 Little Lost 17040218 Big Lost 17040219 Big Wood 17040220 Camas 17040221 Little Wood 1705 Middle Snake 170501 Middle Snake-Boise 17050101 C. J. Idaho 17050102 Bruneau 17050103 Middle Snake-Succor 17050104 Upper Owyhee 17050105 South Fork Owyhee 17050106 East Little Owyhee. 
Nevada, 17050107 Middle Owyhee 17050108 Jordan 17050109 Crooked-Rattlesnake 17050110 Lower Owyhee 17050111 North and Middle Forks Boise 17050112 Boise-Mores 17050113 South Fork Boise 17050114 Lower Boise 17050115 Middle Snake-Payette 17050116 Upper Malheur 17050117 Lower Malheur 17050118 Bully 17050119 Willow 17050120 South Fork Payette 17050121 Middle Fork Payette 17050122 Payette 17050123 North Fork Payette 17050124 Weiser 170502 Middle Snake-Powder 17050201 Brownlee Reservoir 17050202 Burnt 17050203 Powder 1706 Lower Snake 170601 Lower Snake 17060101 Hells Canyon 17060102 Imnaha 17060103 Lower Snake-Asotin 17060104 Upper Grande Ronde 17060105 Wallowa 17060106 Lower Grande Ronde 17060107 Lower Snake-Tucannon 17060108 Palouse 17060109 Rock 17060110 Lower Snake 170602 Salmon 17060201 Upper Salmon 17060202 Pahsimeroi 17060203 Middle Salmon-Panther 17060204 Lemhi 17060205 Upper Middle Fork Salmon 17060206 Lower Middle Fork Salmon 17060207 Middle Salmon-Chamberlain 17060208 South Fork Salmon 17060209 Lower Salmon 17060210 Little Salmon 170603 Clearwater 17060301 Upper Selway 17060302 Lower Selway 17060303 Lochsa 17060304 Middle Fork Clearwater 17060305 South Fork Clearwater 17060306 Clearwater 17060307 Upper North Fork Clearwater 17060308 Lower North Fork Clearwater 1707 Middle Columbia 170701 Middle Columbia 17070101 Middle Columbia-Lake Wallula 17070102 Walla Walla 17070103 Umatilla 17070104 Willow 17070105 Middle Columbia-Hood 17070106 Klickitat 170702 John Day 17070201 Upper John Day 17070202 North Fork John Day 17070203 Middle Fork John Day 17070204 Lower John Day 170703 Deschutes 17070301 Upper Deschutes 17070302 Little Deschutes 17070303 Beaver-South Fork 17070304 Upper Crooked 17070305 Lower Crooked 17070306 Lower Deschutes 17070307 Trout 1708 Lower Columbia 170800 Lower Columbia 17080001 Lower Columbia-Sandy 17080002 Lewis 17080003 Lower Columbia-Clatskanie 17080004 Upper Cowlitz 17080005 Lower Cowlitz 17080006 Lower Columbia 1709 Willamette 170900 Willamette 
17090001 Middle Fork Willamette 17090002 Coast Fork Willamette 17090003 Upper Willamette 17090004 Mckenzie 17090005 North Santiam 17090006 South Santiam 17090007 Middle Willamette 17090008 Yamhill 17090009 Molalla-Pudding 17090010 Tualatin 17090011 Clackamas 17090012 Lower Willamette 1710 Oregon-Washington Coastal 171001 Washington Coastal 17100101 Hoh-Quillayute 17100102 Queets-Quinault 17100103 Upper Chehalis 17100104 Lower Chehalis 17100105 Grays Harbor 17100106 Willapa Bay 171002 Northern Oregon Coastal 17100201 Necanicum 17100202 Nehalem 17100203 Wilson-Trask-Nestucca 17100204 Siletz-Yaquina 17100205 Alsea 17100206 Siuslaw 17100207 Siltcoos 171003 Southern Oregon Coastal 17100301 North Umpqua 17100302 South Umpqua 17100303 Umpqua 17100304 Coos 17100305 Coquille 17100306 Sixes 17100307 Upper Rogue 17100308 Middle Rogue 17100309 Applegate 17100310 Lower Rogue 17100311 Illinois 17100312 Chetco 1711 Puget Sound 171100 Puget Sound 17110001 Fraser 17110002 Strait of Georgia 17110003 San Juan Islands 17110004 Nooksack 17110005 Upper Skagit 17110006 Sauk 17110007 Lower Skagit 17110008 Stillaguamish 17110009 Skykomish 17110010 Snoqualmie 17110011 Snohomish 17110012 Lake Washington 17110013 Duwamish 17110014 Puyallup 17110015 Nisqually 17110016 Deschutes 17110017 Skokomish 17110018 Hood Canal 17110019 Puget Sound 17110020 Dungeness-Elwha 17110021 Crescent-Hoko 1712 Oregon closed basins 171200 Oregon closed basins 17120001 Harney-Malheur Lakes 17120002 Silvies 17120003 Donner Und Blitzen 17120004 Silver 17120005 Summer Lake 17120006 Lake Abert 17120007 Warner Lakes 17120008 Guano 17120009 Alvord Lake 18 California 1801 Klamath-Northern California Coastal 180101 Northern California Coastal 18010101 Smith 18010102 Mad-Redwood 18010103 Upper Eel 18010104 Middle Fork Eel 18010105 Lower Eel 18010106 South Fork Eel 18010107 Mattole 18010108 Big-Navarro-Garcia 18010109 Gualala-Salmon 18010110 Russian 18010111 Bodega Bay 180102 Klamath 18010201 Williamson 18010202 Sprague 
18010203 Upper Klamath Lake 18010204 Lost 18010205 Butte 18010206 Upper Klamath 18010207 Shasta 18010208 Scott 18010209 Lower Klamath 18010210 Salmon 18010211 Trinity 18010212 South Fork Trinity 1802 Sacramento 180200 Upper Sacramento 18020001 Goose Lake 18020002 Upper Pit 18020003 Lower Pit 18020004 Mccloud 18020005 Sacramento headwaters 180201 Lower Sacramento 18020101 Sacramento-Lower Cow-Lower Clear 18020102 Lower Cottonwood 18020103 Sacramento-Lower Thomes 18020104 Sacramento-Stone Corral 18020105 Lower Butte 18020106 Lower Feather 18020107 Lower Yuba 18020108 Lower Bear 18020109 Lower Sacramento 18020110 Lower Cache 18020111 Lower American 18020112 Sacramento-Upper Clear 18020113 Cottonwood headwaters 18020114 Upper Elder-Upper Thomes 18020115 Upper Stony 18020116 Upper Cache 18020117 Upper Putah 18020118 Upper Cow-Battle 18020119 Mill-Big Chico 18020120 Upper Butte 18020121 North Fork Feather 18020122 East Branch North Fork Feather 18020123 Middle Fork Feather 18020124 Honcut headwaters 18020125 Upper Yuba 18020126 Upper Bear 18020127 Upper Coon-Upper Auburn 18020128 North Fork American 18020129 South Fork American 1803 Tulare-Buena Vista Lakes 180300 Tulare-Buena Vista Lakes 18030001 Upper Kern 18030002 South Fork Kern 18030003 Middle Kern-Upper Tehachapi- 18030004 Upper Poso 18030005 Upper Deer-Upper White 18030006 Upper Tule 18030007 Upper Kaweah 18030008 Mill 18030009 Upper Dry 18030010 Upper King 18030011 Upper Los Gatos-Avenal 18030012 Tulare-Buena Vista Lakes 1804 San Joaquin 180400 San Joaquin 18040001 Middle San Joaquin-Lower 18040002 Middle San Joaquin-Lower 18040003 San Joaquin Delta 18040004 Lower Calaveras-Mormon Slough 18040005 Lower Cosumnes-Lower Mokelumne 18040006 Upper San Joaquin 18040007 Upper Chowchilla-Upper Fresno 18040008 Upper Merced 18040009 Upper Tuolumne 18040010 Upper Stanislaus 18040011 Upper Calaveras 18040012 Upper Mokelumne 18040013 Upper Cosumnes 18040014 Panoche-San Luis Reservoir 1805 San Francisco Bay 180500 San Francisco 
Bay 18050001 Suisun Bay 18050002 San Pablo Bay 18050003 Coyote 18050004 San Francisco Bay 18050005 Tomales-Drake Bays 18050006 San Francisco Coastal South 1806 Central California Coastal 180600 Central California Coastal 18060001 San Lorenzo-Soquel 18060002 Pajaro 18060003 Carrizo Plain 18060004 Estrella 18060005 Salinas 18060006 Central Coastal 18060007 Cuyama 18060008 Santa Maria 18060009 San Antonio 18060010 Santa Ynez 18060011 Alisal-Elkhorn Sloughs 18060012 Carmel 18060013 Santa Barbara Coastal 18060014 Santa Barbara Channel Islands 1807 Southern California Coastal 180701 Ventura-San Gabriel Coastal 18070101 Ventura 18070102 Santa Clara 18070103 Calleguas 18070104 Santa Monica Bay 18070105 Los Angeles 18070106 San Gabriel 18070107 San Pedro Channel Islands 180702 Santa Ana 18070201 Seal Beach 18070202 San Jacinto 18070203 Santa Ana 18070204 Newport Bay 180703 Laguna-San Diego Coastal 18070301 Aliso-San Onofre 18070302 Santa Margarita 18070303 San Luis Rey-Escondido 18070304 San Diego 18070305 Cottonwood-Tijuana 1808 North Lahontan 180800 North Lahontan 18080001 Surprise Valley 18080002 Madeline Plains 18080003 Honey-Eagle Lakes 1809 Northern Mojave-Mono Lake 180901 Mono-Owens Lakes 18090101 Mono Lake 18090102 Crowley Lake 18090103 Owens Lake 180902 Northern Mojave 18090201 Eureka-Saline Valleys 18090202 Upper Amargosa 18090203 Death Valley-Lower Amargosa 18090204 Panamint Valley 18090205 Indian Wells-Searles Valleys 18090206 Antelope-Fremont Valleys 18090207 Coyote-Cuddeback Lakes 18090208 Mojave 1810 Southern Mojave-Salton Sea 181001 Southern Mojave 18100100 Southern Mojave 181002 Salton Sea 18100200 Salton Sea 19 Alaska 1901 Southeast 190101 Southern Southeast 19010101 Southeast Mainland 19010102 Ketchikan 19010103 Prince of Wales 190102 Central Southeast 19010201 Mainland 19010202 Kuiu-Kupreanof-Mitkof-Etolin-Zarembo-Wrangell Islands 19010203 Baranof-Chichagof Islands 19010204 Admiralty Island 190103 Northern Southeast 19010301 Lynn Canal 19010302 Glacier 
Bay 19010303 Chilkat-Skagway Rivers 190104 Gulf of Alaska 19010401 Yakutat Bay 19010402 Bering Glacier 1902 Southcentral 190201 Copper River 19020101 Upper Copper River 19020102 Middle Copper River 19020103 Chitina River 19020104 Lower Copper River 190202 Prince William Sound 19020201 Eastern Prince William Sound 19020202 Western Prince William Sound 190203 Kenai Peninsula 19020301 Lower Kenai Peninsula 19020302 Upper Kenai Peninsula 190204 Knik Arm 19020401 Anchorage 19020402 Matansuka 190205 Susitna River 19020501 Upper Susitna River 19020502 Chulitna River 19020503 Talkeetna River 19020504 Yentna River 19020505 Lower Susitna River 190206 Western Cook Inlet 19020601 Redoubt-Trading Bays 19020602 Tuxdeni-Kamishak Bays 190207 Kodiak-Shelikof 19020701 Kodiak-Afognak Islands 19020702 Shelikof Straight 1903 Southwest 190301 Aleutian Islands 19030101 Cold Bay 19030102 Fox Islands 19030103 Western Aleutian 19030104 Pribilof Islands 190302 Kvichak-Port Heiden 19030201 Port Heiden 19030202 Ugashik Bay 19030203 Egegik Bay 19030204 Naknek 19030205 Lake Clark 19030206 Lake Iliamna 190303 Nushagak River 19030301 Upper Nushagak River 19030302 Mulchatna River 19030303 Lower Nushagak River 19030304 Wood River 19030305 Togiak 190304 Upper Kuskokwim River 19030401 North Fork Kuskokwim River 19030402 Farewll Lake 19030403 Takotna River 19030404 Holitna River 19030405 Stony River 190305 Lower Kuskokwim River 19030501 Aniak 19030502 Kuskokwim Delta 19030503 Nunavak-St. 
Matthew Islands 1904 Yukon 190401 Canada 19040101 White River 19040102 Ladue River 19040103 Sixtymile River 19040104 Fortymile River 190402 Porcupine River 19040201 Old Crow River 19040202 Coleen River 19040203 Sheenjek River 19040204 Black River 19040205 Porcupine Flats 190403 Chandalar-Christian Rivers 19040301 Middle Fork-North Fork Chandalar Rivers 19040302 East Fork Chandalar River 19040303 Christian River 19040304 Lower Chandalar River 190404 Upper Yukon River 19040401 Eagle To Circle 19040402 Birch-Beaver Creeks 19040403 Yukon Flats 19040404 Ramparts 190405 Tanana River 19040501 Nebesna-Chisana Rivers 19040502 Tok 19040503 Healy Lake 19040504 Delta River 19040505 Salcha River 19040506 Chena River 19040507 Tanana River 19040508 Nenana River 19040509 Tolovana River 19040510 Kantishna River 19040511 Lower Tanana River 190406 Koyukuk River 19040601 Upper Koyukuk River 19040602 South Fork Koyukuk River 19040603 Alatna River 19040604 Kanuti River 19040605 Allakaket River 19040606 Huslia River 19040607 Dulbi River 19040608 Koyukuk Flats 19040609 Kateel River 190407 Central Yukon 19040701 Tozitna River 19040702 Nowitna River 19040703 Melozitna River 19040704 Ramparts to Ruby 19040705 Galena 190408 Lower Yukon 19040801 Anvik River 19040802 Upper Innoko River 19040803 Lower Innoko River 19040804 Anvik to Pilot Station 19040805 Yukon Delta 1905 Northwest 190501 Norton Sound 19050101 St. 
Lawrence Island 19050102 Unalakleet 19050103 Norton Bay 19050104 Nome 19050105 Imuruk Basin 190502 Northern Seward Peninsula 19050201 Shishmaref 19050202 Goodhope-Spafarief Bay 19050203 Buckland River 190503 Kobuk-Selawik Rivers 19050301 Selawik Lake 19050302 Upper Kobuk River 19050303 Middle Kobuk River 19050304 Lower Kobuk River 190504 Noatak River-Lisburne Peninsula 19050401 Upper Noatak River 19050402 Middle Noatak River 19050403 Lower Noatak River 19050404 Wulik-Kivalina Rivers 19050405 Lisburne Peninsula 1906 Arctic 190601 Western Arctic 19060101 Kukpowruk River 19060102 Kokolik River 19060103 Utukok River 190602 Barrow 19060201 Kuk River 19060202 Northwest Coast 19060203 Meade River 19060204 Ikpikpuk River 19060205 Harrison Bay 190603 Colville River 19060301 Upper Colville River 19060302 Killik River 19060303 Chandler-Anaktuvuk Rivers 19060304 Lower Colville River 190604 Prudhoe Bay 19060401 Kuparuk River 19060402 Sagavanirktok River 19060403 Mikkelson Bay 190605 Eastern Arctic 19060501 Canning River 19060502 Camden Bay 19060503 Beaufort Lagoon 20 Hawaii 2001 Hawaii 200100 Hawaii 20010000 Hawaii 2002 Maui 200200 Maui 20020000 Maui 2003 Kahoolawe 200300 Kahoolawe 20030000 Kahoolawe 2004 Lanai 200400 Lanai 20040000 Lanai 2005 Molokai 200500 Molokai 20050000 Molokai 2006 Oahu 200600 Oahu 20060000 Oahu 2007 Kauai 200700 Kauai 20070000 Kauai 2008 Niihau 200800 Niihau 20080000 Niihau 2009 Northwestern Hawaiian Islands 200900 Northwestern Hawaiian Islands 20090000 Northwestern Hawaiian Islands 21 Caribbean 2101 Puerto Rico 210100 Puerto Rico 21010001 Interior Puerto Rico 21010002 Cibuco-Guajataca 21010003 Culebrinas-Guanajibo 21010004 Southern Puerto Rico 21010005 Eastern Puerto Rico 21010006 Puerto Rican Islands 2102 Virgin Islands 210200 Virgin Islands 21020001 St. John-St. Thomas 21020002 St. 
Croix 2103 Caribbean Outlying Areas 210300 Caribbean Outlying Areas 21030001 Canal Zone 21030002 Navassa 21030003 Roncador-Serrana </pre> </td> </tr> </tbody> </table> <div align="center" class="bottombox"><span class="info"></span></div> <!--#include virtual="/inc/footer_water.html" --> <!--#include virtual="/inc/footer.html" --> </body> </html>"""
22.758704
71
0.862563
8,869
66,023
6.421017
0.519788
0.005847
0.001141
0.000948
0.001106
0
0
0
0
0
0
0.370369
0.127138
66,023
2,901
72
22.758704
0.617814
0
0
0
0
0
0.999743
0.010208
0
0
0
0
0
1
0
false
0.000689
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
ffcb3d4ae93d16e64ee84a0ee759e7d7e0280fea
408
py
Python
CombatEncounterInterface.py
justinbeetle/pyDragonWarrior
cfaf57161ab4950da537de9937d688bc7d24bf4a
[ "MIT" ]
3
2021-04-07T14:43:20.000Z
2021-04-17T21:26:08.000Z
CombatEncounterInterface.py
justinbeetle/pyDragonWarrior
cfaf57161ab4950da537de9937d688bc7d24bf4a
[ "MIT" ]
1
2022-01-02T15:52:23.000Z
2022-01-12T01:51:50.000Z
CombatEncounterInterface.py
justinbeetle/pyDragonWarrior
cfaf57161ab4950da537de9937d688bc7d24bf4a
[ "MIT" ]
null
null
null
#!/usr/bin/env python # Imports to support type annotations from typing import List import abc from CombatCharacterState import CombatCharacterState class CombatEncounterInterface(metaclass=abc.ABCMeta): @abc.abstractmethod def render_monsters(self) -> None: pass @abc.abstractmethod def render_damage_to_targets(self, targets: List[CombatCharacterState]) -> None: pass
21.473684
84
0.752451
45
408
6.733333
0.622222
0.112211
0.132013
0.171617
0
0
0
0
0
0
0
0
0.176471
408
18
85
22.666667
0.901786
0.137255
0
0.4
0
0
0
0
0
0
0
0
0
1
0.2
false
0.2
0.3
0
0.6
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
ffe18410ff740e87c6a649239a15ef7f4f7382d4
113
py
Python
examples/_attic/twitter/__init__.py
hiway/python-zentropi
006f4a6de8b6691477fa1416476cd6cef665c918
[ "Apache-2.0" ]
5
2017-05-28T18:15:38.000Z
2021-07-15T22:31:33.000Z
examples/_attic/twitter/__init__.py
hiway/python-zentropi
006f4a6de8b6691477fa1416476cd6cef665c918
[ "Apache-2.0" ]
null
null
null
examples/_attic/twitter/__init__.py
hiway/python-zentropi
006f4a6de8b6691477fa1416476cd6cef665c918
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python # coding=utf-8 from .twitter_agent import TwitterAgent __all__ = [ 'TwitterAgent', ]
12.555556
39
0.699115
14
113
5.285714
0.928571
0
0
0
0
0
0
0
0
0
0
0.010638
0.168142
113
8
40
14.125
0.776596
0.292035
0
0
0
0
0.153846
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
ffe71881ab77af602c49de6772155a57933140e1
6,298
py
Python
src/pyphoplacecellanalysis/PhoPositionalData/import_data.py
CommanderPho/pyPhoPlaceCellAnalysis
6e6a5cd9c0f2abbe6a367d4c87299fcd01c750a6
[ "MIT" ]
null
null
null
src/pyphoplacecellanalysis/PhoPositionalData/import_data.py
CommanderPho/pyPhoPlaceCellAnalysis
6e6a5cd9c0f2abbe6a367d4c87299fcd01c750a6
[ "MIT" ]
null
null
null
src/pyphoplacecellanalysis/PhoPositionalData/import_data.py
CommanderPho/pyPhoPlaceCellAnalysis
6e6a5cd9c0f2abbe6a367d4c87299fcd01c750a6
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ Created on Sat Oct 9 12:33:48 2021 @author: Pho """ import numpy as np import pandas as pd from pathlib import Path from pyphoplacecellanalysis.PhoPositionalData.load_exported import import_mat_file # from pyphoplacecellanalysis.PhoPositionalData.process_data import process_positionalAnalysis_data, gen_2d_histrogram, get_heatmap_color_vectors, process_chunk_equal_poritions_data, extract_spike_timeseries from pyphoplacecellanalysis.PhoPositionalData.process_data import process_positionalAnalysis_data, extract_spike_timeseries def build_spike_positions_list(spike_list, t, x, y): """Interpolate the positions that a spike occurred for each spike timestamp Args: spike_list ([type]): [description] t ([type]): [description] x ([type]): [description] y ([type]): [description] """ # Determine the x and y positions each spike occured for each cell num_cells = len(spike_list) spike_positions_list = list() for cell_id in np.arange(num_cells): spike_positions_list.append(np.vstack((np.interp(spike_list[cell_id], t, x), np.interp(spike_list[cell_id], t, y)))) # spike_positions_list.append(np.hstack(x[spike_list[cell_id]], y[spike_list[cell_id]])) # spike_speed = speeds[spike_list[cell_id]] return spike_positions_list ## TO REMOVE: def build_cellID_reverse_lookup_map(cell_ids): # Allows reverse indexing into the linear imported array using the original cell ID indicies flat_cell_ids = [int(cell_id) for cell_id in cell_ids] # ensures integer indexes for IDs linear_flitered_ids = np.arange(len(cell_ids)) return dict(zip(flat_cell_ids, linear_flitered_ids)) def perform_import_spikes(t, x, y, mat_import_parent_path=Path(r'C:\Share\data\RoyMaze1')): # Import the spikes # spikes_mat_import_file = mat_import_parent_path.joinpath('spikesTable.mat') spikes_mat_import_file = mat_import_parent_path.joinpath('ExportedData', 'spikesAnalysis.mat') spikes_data = import_mat_file(mat_import_file=spikes_mat_import_file) # print(spikes_data.keys()) spike_matrix = 
spikes_data['spike_matrix'] spike_cells = spikes_data['spike_cells'][0] cell_ids = spikes_data['spike_cells_ids'][:,0].T flat_cell_ids = [int(cell_id) for cell_id in cell_ids] # print('spike_matrix: {}, spike_cells: {}'.format(np.shape(spike_matrix), np.shape(spike_cells))) num_cells = np.shape(spike_matrix)[0] # extract_spike_timeseries(spike_cells[8]) spike_list = [extract_spike_timeseries(spike_cell) for spike_cell in spike_cells] # print(spike_list[0]) # print('np.shape(cell_ids): {}, cell_ids: {}'.format(np.shape(cell_ids), cell_ids)) # Determine the x and y positions each spike occured for each cell spike_positions_list = build_spike_positions_list(spike_list, t, x, y) # print(np.shape(spike_positions_list[0])) # (2, 9297) # reverse_cellID_idx_lookup_map: Allows reverse indexing into the linear imported array using the original cell ID indicies reverse_cellID_idx_lookup_map = build_cellID_reverse_lookup_map(cell_ids) return spike_matrix, spike_cells, num_cells, spike_list, spike_positions_list, flat_cell_ids, reverse_cellID_idx_lookup_map def perform_import_positions(mat_import_parent_path=Path(r'C:\Share\data\RoyMaze1')): position_mat_import_file = mat_import_parent_path.joinpath('ExportedData','positionAnalysis.mat') data = import_mat_file(mat_import_file=position_mat_import_file) # # KevinMaze1 # mat_import_file = 'data/Kevin-maze1/PreFinalBinningResultWholeEnv.mat' # data = import_mat_file(mat_import_file=mat_import_file) # # 'spikeStruct' # # 'speed','t','v' # 3243057×1 # # Achilles: # main_data_path = Path(r'C:\Users\Pho\repos\PhoPy3DPositionAnalysis2021\data\Achilles_10252013') # positionFilename = main_data_path.joinpath('ExportedData', 'Achilles_10252013_position.mat') # positionFileFullpath = main_data_path.joinpath(positionFilename) # build a full filepath from the basepath # data = import_mat_file(mat_import_file=positionFileFullpath) # # print('data:'.format(data.keys())) # data # Get the position data: t,x,y,speeds,dt,dx,dy = 
process_positionalAnalysis_data(data) print('shapes - t: {}, x: {}, y: {}'.format(np.shape(t), np.shape(x), np.shape(y))) # extrema_min, extrema_max = np.nanmin(x), np.nanmax(x) # print('for x: extrema_min: {}, extrema_max: {}'.format(extrema_min, extrema_max)) # extrema_min, extrema_max = np.nanmin(y), np.nanmax(y) # print('for y: extrema_min: {}, extrema_max: {}'.format(extrema_min, extrema_max)) return t,x,y,speeds,dt,dx,dy def perform_import_extras(mat_import_parent_path=Path(r'C:\Share\data\RoyMaze1')): extras_mat_import_file = mat_import_parent_path.joinpath('ExportedData','extrasAnalysis.mat') ripple_mat_import_file = mat_import_parent_path.joinpath('ExportedData', 'RippleManualExport.h5') #periods_mat_import_file = mat_import_parent_path.joinpath('ExportedData\BehavioralPeriodsManualExport.h5') periods_mat_import_file = mat_import_parent_path.joinpath('ExportedData', 'extrasAnalysis.mat') #source_data.behavior.RoyMaze1.list ripple_data = import_mat_file(mat_import_file=ripple_mat_import_file) ripple_times = ripple_data['time'] ripple_peak_times = ripple_data['peakTime'] ripple_peak_values = ripple_data['peakVal'] all_results_data = import_mat_file(mat_import_file=periods_mat_import_file) # behavioral_periods = all_results_data['behavioral_periods_table'] behavioral_periods = all_results_data['behavioral_periods'] behavioral_epochs = all_results_data['behavioral_epochs'] behavioral_epochs = pd.DataFrame(all_results_data['behavioral_epochs'], columns=['epoch_index','start_seconds_absolute','end_seconds_absolute','start_seconds','end_seconds','duration']) #['pre_sleep','track','post_sleep'] behavioral_periods = pd.DataFrame(all_results_data['behavioral_periods'], columns=['period_index','epoch_start_seconds','epoch_end_seconds','duration','type','behavioral_epoch']) return ripple_times, ripple_peak_times, ripple_peak_values, behavioral_periods, behavioral_epochs
49.984127
207
0.753255
867
6,298
5.113033
0.217993
0.058877
0.055718
0.04286
0.462215
0.409204
0.371306
0.289646
0.274758
0.151139
0
0.011215
0.136393
6,298
126
208
49.984127
0.80364
0.405049
0
0.042553
0
0
0.146601
0.029757
0
0
0
0
0
1
0.106383
false
0
0.361702
0
0.574468
0.021277
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
fffe74bcc00528c03dc923eac04425b4c24c614c
586
py
Python
extra_tests/test_recursion.py
olliemath/pypy
8b873bd0b8bf76075aba3d915c260789f26f5788
[ "Apache-2.0", "OpenSSL" ]
1
2021-06-02T23:02:09.000Z
2021-06-02T23:02:09.000Z
extra_tests/test_recursion.py
olliemath/pypy
8b873bd0b8bf76075aba3d915c260789f26f5788
[ "Apache-2.0", "OpenSSL" ]
1
2021-03-30T18:08:41.000Z
2021-03-30T18:08:41.000Z
extra_tests/test_recursion.py
olliemath/pypy
8b873bd0b8bf76075aba3d915c260789f26f5788
[ "Apache-2.0", "OpenSSL" ]
1
2022-03-30T11:42:37.000Z
2022-03-30T11:42:37.000Z
import sys def f(): try: return f() except RuntimeError: return sys.exc_info() def do_check(): f() assert sys.exc_info() == (None, None, None) def recurse(n): if n > 0: return recurse(n-1) else: return do_check() def test_recursion(): """ Test that sys.exc_info() is cleared after RecursionError was raised. The issue only appeared intermittently, depending on the contents of the call stack, hence the need for the recurse() helper to trigger it reliably. """ for i in range(50): recurse(i)
20.206897
79
0.616041
83
586
4.277108
0.626506
0.050704
0.084507
0
0
0
0
0
0
0
0
0.009547
0.284983
586
28
80
20.928571
0.837709
0.372014
0
0
0
0
0
0
0
0
0
0
0.058824
1
0.235294
false
0
0.058824
0
0.529412
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
ffff60238d237f20a52c03e47dbd8b823a44bfb2
1,169
py
Python
export_to_excel.py
salehjg/system_resource_logger
8f4725b6ee57167939b988126a5fe0d1157622ac
[ "MIT" ]
null
null
null
export_to_excel.py
salehjg/system_resource_logger
8f4725b6ee57167939b988126a5fe0d1157622ac
[ "MIT" ]
null
null
null
export_to_excel.py
salehjg/system_resource_logger
8f4725b6ee57167939b988126a5fe0d1157622ac
[ "MIT" ]
null
null
null
import xlwt import numpy as np from tqdm import tqdm from mine_logged_data import generate_cpu_core_used def convert2excel(): saved_log = np.load('data.npy') core_usage_cnt = generate_cpu_core_used(saved_log, 10.0) print 'shape: ', saved_log.shape wb = xlwt.Workbook() ws = wb.add_sheet('logged data') ws.write(0, 0, "minutes") ws.write(0, 1, "swap_used_GB") ws.write(0, 2, "swap_percent") ws.write(0, 3, "ram_used_GB") ws.write(0, 4, "ram_percent") for q in range(saved_log.shape[1] - 5): ws.write(0, 5 + q, ''.join(['CPU', str(q)])) ws.write(0, saved_log.shape[1], "used_core_count") for i in tqdm(range(saved_log.shape[0])): ws.write(i + 1, 0, saved_log[i, 0]) ws.write(i + 1, 1, saved_log[i, 0 + 1]) ws.write(i + 1, 2, saved_log[i, 1 + 1]) ws.write(i + 1, 3, saved_log[i, 2 + 1]) ws.write(i + 1, 4, saved_log[i, 3 + 1]) for q in range(5, saved_log.shape[-1]): ws.write(i + 1, q, saved_log[i, q]) ws.write(i + 1, saved_log.shape[1], core_usage_cnt[i]) wb.save('data.xls') def main(): convert2excel() main()
24.87234
62
0.585971
207
1,169
3.140097
0.256039
0.172308
0.086154
0.096923
0.135385
0
0
0
0
0
0
0.052095
0.244654
1,169
46
63
25.413043
0.684032
0
0
0
1
0
0.08982
0
0
0
0
0
0
0
null
null
0
0.129032
null
null
0.032258
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
0808d64f798e984f7a20af17c7a9684fb93ce278
129
py
Python
exdir/__init__.py
robertjoosten/exdir
340e648782b48b4313e4b6dcf6f35b856d2d0663
[ "MIT" ]
null
null
null
exdir/__init__.py
robertjoosten/exdir
340e648782b48b4313e4b6dcf6f35b856d2d0663
[ "MIT" ]
null
null
null
exdir/__init__.py
robertjoosten/exdir
340e648782b48b4313e4b6dcf6f35b856d2d0663
[ "MIT" ]
null
null
null
from exdir.directory import File __author__ = "Robert Joosten" __author_email__ = "rwm.joosten@gmail.com" __version__ = "1.0.0"
21.5
42
0.767442
18
129
4.777778
0.833333
0
0
0
0
0
0
0
0
0
0
0.026316
0.116279
129
5
43
25.8
0.72807
0
0
0
0
0
0.310078
0.162791
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
080eb5d01122d27bc837d22db5f5e67a7b506146
552
py
Python
service/src/resources/__init__.py
xuqiongkai/ALTER
a9ec98c0ed576a68f14711eeff6f1c7d2d34c6f7
[ "MIT" ]
8
2019-09-18T01:14:07.000Z
2022-02-05T03:43:24.000Z
service/src/resources/__init__.py
xuqiongkai/ALTER
a9ec98c0ed576a68f14711eeff6f1c7d2d34c6f7
[ "MIT" ]
4
2019-11-15T03:09:52.000Z
2022-03-24T15:01:55.000Z
service/src/resources/__init__.py
xuqiongkai/ALTER
a9ec98c0ed576a68f14711eeff6f1c7d2d34c6f7
[ "MIT" ]
4
2019-09-17T22:17:30.000Z
2022-02-05T03:43:28.000Z
# from .user import UserResource # from .user import UserRegistrationResource # from .user import UserLoginResource # from .task import TaskResource # from .job import JobResource from .user import * from .task import * from .job import * from .sentence import SentenceClassResource from .sentence import SentenceLmResource from .sentence import SentenceWmdResource from .sentence import SentenceMetricsResource from .word import WordClassResource from .recommend import RecommendLanguageModelResource from .recommend import RecommendSemanticResource
32.470588
53
0.838768
57
552
8.122807
0.350877
0.069114
0.12095
0
0
0
0
0
0
0
0
0
0.119565
552
16
54
34.5
0.952675
0.306159
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
2
08321ea00a20e2ff88bb3bf0b921e01618f2ccc0
385
py
Python
constants.py
finbourne/commissions-booking-script
818364e0becc7153d6f580bfe74b6608d7dce965
[ "MIT" ]
null
null
null
constants.py
finbourne/commissions-booking-script
818364e0becc7153d6f580bfe74b6608d7dce965
[ "MIT" ]
null
null
null
constants.py
finbourne/commissions-booking-script
818364e0becc7153d6f580bfe74b6608d7dce965
[ "MIT" ]
null
null
null
LINKING_PROPERTY = "Transaction/generated/LinkedTransactionId" INPUT_TXN_FILTER = "type in 'Buy','Purchase','Sell','FwdFxSell', 'FwdFxBuy','FxBuy','FxSell','StockIn'" ENTITY_PROPERTY = "Portfolio/test/Entity" BROKER_PROPERTY = "Portfolio/test/Broker" COUNTRY_PROPERTY = "Instrument/test/Country" PROPERTIES_REQUIRED = [ ENTITY_PROPERTY, BROKER_PROPERTY, COUNTRY_PROPERTY ]
35
103
0.771429
41
385
7
0.634146
0.097561
0.146341
0
0
0
0
0
0
0
0
0
0.096104
385
11
104
35
0.824713
0
0
0
1
0
0.487047
0.463731
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
0843530074db0ac7e9033898bcf088bd5f2ce3f3
829
py
Python
1694-reformat-phone-number/1694-reformat-phone-number.py
marzy-bn/Leetcode_2022
07d6b9050279e82f610ed4a54209b33db3e3f8f9
[ "MIT" ]
null
null
null
1694-reformat-phone-number/1694-reformat-phone-number.py
marzy-bn/Leetcode_2022
07d6b9050279e82f610ed4a54209b33db3e3f8f9
[ "MIT" ]
null
null
null
1694-reformat-phone-number/1694-reformat-phone-number.py
marzy-bn/Leetcode_2022
07d6b9050279e82f610ed4a54209b33db3e3f8f9
[ "MIT" ]
null
null
null
class Solution: def reformatNumber(self, number: str) -> str: number = number.replace(' ','') number = number.replace('-','') output = '' count = len(number) remain = count % 3 print(remain) if remain == 0: last = number[len(number)-3:] end = len(number)-3 if remain == 2: last = number[len(number)-2:] end = len(number)-2 if remain == 1: last = number[len(number)-4:len(number)-2] + '-' + number[len(number)-2:] end = len(number)-4 for i,num in enumerate(number[:end]): output += num if (i+1)%3 == 0: output += '-' count -= 1 output += last return output
27.633333
85
0.428227
87
829
4.08046
0.310345
0.228169
0.169014
0.160563
0.157746
0.157746
0.157746
0
0
0
0
0.033827
0.429433
829
30
86
27.633333
0.716702
0
0
0
0
0
0.004819
0
0
0
0
0
0
1
0.041667
false
0
0
0
0.125
0.041667
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
084c752ab0d76e6654e5a40aa897c33d7dc5723a
1,499
py
Python
data_for_keras.py
ivanyu/kaggle-digit-recognizer
4eec8be465ece821c29f88f167bb5fa7cb3b2f42
[ "MIT" ]
1
2021-01-28T19:14:22.000Z
2021-01-28T19:14:22.000Z
data_for_keras.py
ivanyu/kaggle-digit-recognizer
4eec8be465ece821c29f88f167bb5fa7cb3b2f42
[ "MIT" ]
null
null
null
data_for_keras.py
ivanyu/kaggle-digit-recognizer
4eec8be465ece821c29f88f167bb5fa7cb3b2f42
[ "MIT" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import print_function import os import numpy as np import scipy import scipy.misc import meta from meta import keras_data_filename from classify_base import load_data if not os.path.exists(meta.KERAS_DATA_DIR): os.mkdir(meta.KERAS_DATA_DIR) if not os.path.exists(keras_data_filename('original')): os.mkdir(keras_data_filename('original')) # if not os.path.exists(keras_data_filename('bin')): # os.mkdir(keras_data_filename('bin')) # if not os.path.exists(keras_data_filename('generator')): # os.mkdir(keras_data_filename('generator')) # for i in range(9 + 1): # if not os.path.exists(keras_data_filename('generator/{}'.format(i))): # os.mkdir(keras_data_filename('generator/{}'.format(i))) (X_train, y_train, _) = load_data(None) for i in range(X_train.shape[0]): fname = 'original/{0:07d}-{1}.npy'.format(i, y_train[i]) x = X_train[i,:] np.save(keras_data_filename(fname), x) # for i in range(X_train.shape[0]): # fname = 'bin/{0:07d}-{1}.bin'.format(i, y_train[i]) # x = X_train[i,:] # with open(keras_data_filename(fname), 'wb') as fh: # # fh.write(b'{0:s} {1:d} {2:d}\n'.format(x.dtype, *x.shape)) # fh.write(x.data) # fh.flush() # for i in range(X_train.shape[0]): # fname = 'generator/{0}/{1:07d}.jpg'.format(y_train[i], i) # x = X_train[i,:].reshape((meta.IMG_HEIGHT, meta.IMG_WIDTH)) # scipy.misc.imsave(keras_data_filename(fname), x)
34.068182
75
0.668446
250
1,499
3.808
0.272
0.132353
0.214286
0.057773
0.506303
0.380252
0.296218
0.296218
0.22479
0
0
0.016471
0.149433
1,499
43
76
34.860465
0.730196
0.593729
0
0
0
0
0.068027
0.040816
0
0
0
0
0
1
0
false
0
0.470588
0
0.470588
0.058824
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
f23b08c3090f1548a85589ab577b11d71d764906
684
py
Python
Desafio34yt.py
gsvleao/knowledge_hub.py
2f5d9e4fc0760ef7a96dc2619b3b0efdd17cd1a9
[ "MIT" ]
null
null
null
Desafio34yt.py
gsvleao/knowledge_hub.py
2f5d9e4fc0760ef7a96dc2619b3b0efdd17cd1a9
[ "MIT" ]
null
null
null
Desafio34yt.py
gsvleao/knowledge_hub.py
2f5d9e4fc0760ef7a96dc2619b3b0efdd17cd1a9
[ "MIT" ]
null
null
null
""" Exercício Python 034: Escreva um programa que pergunte o salário de um funcionário e calcule o valor do seu aumento. Para salários superiores a R$1.250,00, calcule um aumento de 10%. Para os inferiores ou iguais, o aumento é de 15%. """ print('=-' * 10, 'Desafio 34', '-=' * 10) salario = float(input('Qual o atual salário do funcionário? R$')) if salario > 1.250: print('Será atribuído uma aumento de 10% ao valor de R${:.3f}. O salário atualizado será de {:.3f}.'.format(salario, salario + (salario * 10 / 100))) else: print('Será atribuído uma aumento de 15% ao valor de R${:.3f}. O salário atualizado será de {:.3f}.'.format(salario, salario + (salario * 15 / 100)))
48.857143
153
0.685673
111
684
4.225225
0.459459
0.119403
0.046908
0.089552
0.405117
0.405117
0.277186
0.277186
0.277186
0.277186
0
0.072566
0.173977
684
14
154
48.857143
0.757522
0.339181
0
0
0
0.333333
0.532584
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
2
f23ce845cc3c04db71ea2bb20827304ee46f2bcf
285
py
Python
CursoEmVideo/Aula17/ex083.py
lucashsouza/Desafios-Python
abb5b11ebdfd4c232b4f0427ef41fd96013f2802
[ "MIT" ]
null
null
null
CursoEmVideo/Aula17/ex083.py
lucashsouza/Desafios-Python
abb5b11ebdfd4c232b4f0427ef41fd96013f2802
[ "MIT" ]
null
null
null
CursoEmVideo/Aula17/ex083.py
lucashsouza/Desafios-Python
abb5b11ebdfd4c232b4f0427ef41fd96013f2802
[ "MIT" ]
null
null
null
print('=' * 30) print('{:^30}'.format('Expressão')) print('=' * 30) print('') e = str(input("Digite sua expressão: ")).strip() p1 = e.count("(") p2 = e.count(")") if p1 == p2: print("Sua expressão está correta !") else: print("Sua expressão está incorreta !")
19
49
0.550877
36
285
4.361111
0.5
0.133758
0.152866
0.267516
0
0
0
0
0
0
0
0.044444
0.210526
285
14
50
20.357143
0.653333
0
0
0.181818
0
0
0.365314
0
0
0
0
0
0
1
0
false
0
0
0
0
0.545455
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
2
f2497c58c99048fdb8faaf23cbe965d4822c4066
700
py
Python
tests/io/v3/vm/agents/test_agents_schema.py
alpesh-te/pyTenable
4b5381a7757561f7ac1e79c2e2679356dd533540
[ "MIT" ]
null
null
null
tests/io/v3/vm/agents/test_agents_schema.py
alpesh-te/pyTenable
4b5381a7757561f7ac1e79c2e2679356dd533540
[ "MIT" ]
25
2021-11-16T18:41:36.000Z
2022-03-25T05:43:31.000Z
tests/io/v3/vm/agents/test_agents_schema.py
alpesh-te/pyTenable
4b5381a7757561f7ac1e79c2e2679356dd533540
[ "MIT" ]
2
2022-03-02T12:24:40.000Z
2022-03-29T05:12:04.000Z
''' Testing the Agents schemas ''' from tenable.io.v3.vm.agents.schema import AgentSchema def test_agent_groups_schema_with_name(): ''' Test the agents schema with name ''' payload = { 'items': [ '57b74c0a-5d95-11ec-bf63-0242ac130002', '57b74e58-5d95-11ec-bf63-0242ac130002', '57b74f66-5d95-11ec-bf63-0242ac130002' ] } test_resp = { 'items': [ '57b74c0a-5d95-11ec-bf63-0242ac130002', '57b74e58-5d95-11ec-bf63-0242ac130002', '57b74f66-5d95-11ec-bf63-0242ac130002' ] } schema = AgentSchema(only=['items']) assert test_resp == schema.dump(schema.load(payload))
25.925926
57
0.598571
73
700
5.643836
0.438356
0.116505
0.174757
0.349515
0.490291
0.490291
0.490291
0.490291
0.490291
0.490291
0
0.268102
0.27
700
26
58
26.923077
0.53816
0.084286
0
0.444444
0
0
0.373786
0.349515
0
0
0
0
0.055556
1
0.055556
false
0
0.055556
0
0.111111
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
f24d5c71764e9b6114ea1aafb04624db36b3eeb7
2,041
py
Python
extlinks/organisations/migrations/0001_initial.py
suecarmol/externallinks
388771924f0e0173237393226cb7549a02ae40e3
[ "MIT" ]
6
2019-12-05T13:14:45.000Z
2022-03-13T18:22:00.000Z
extlinks/organisations/migrations/0001_initial.py
WikipediaLibrary/externallinks
6519719a8b01ab121bf77c465c587af3762e99af
[ "MIT" ]
97
2019-07-01T14:42:51.000Z
2022-03-29T04:09:34.000Z
extlinks/organisations/migrations/0001_initial.py
suecarmol/externallinks
388771924f0e0173237393226cb7549a02ae40e3
[ "MIT" ]
8
2019-12-03T01:52:41.000Z
2020-08-19T00:26:46.000Z
# Generated by Django 2.2 on 2019-05-20 14:01 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ("programs", "0001_initial"), ] operations = [ migrations.CreateModel( name="Organisation", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("name", models.CharField(max_length=40)), ("limit_by_user", models.BooleanField(default=False)), ("username_list", models.TextField(blank=True, null=True)), ("username_list_url", models.URLField(blank=True, null=True)), ( "program", models.ForeignKey( blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to="programs.Program", ), ), ], ), migrations.CreateModel( name="Collection", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("name", models.CharField(max_length=40)), ( "organisation", models.ForeignKey( null=True, on_delete=django.db.models.deletion.SET_NULL, to="organisations.Organisation", ), ), ], ), ]
30.462687
78
0.398334
141
2,041
5.638298
0.41844
0.040252
0.05283
0.083019
0.392453
0.392453
0.392453
0.392453
0.392453
0.392453
0
0.021825
0.506124
2,041
66
79
30.924242
0.766865
0.021068
0
0.610169
1
0
0.081162
0.013026
0
0
0
0
0
1
0
false
0
0.033898
0
0.101695
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
f24fcfba8c330084273c2c4d98a7ad885f703ede
682
py
Python
courses/migrations/0010_auto_20190717_0658.py
Isaacli0520/msnmatch
228c6d546e16bd54dc8c7e0803f0f8c408cb0219
[ "MIT" ]
null
null
null
courses/migrations/0010_auto_20190717_0658.py
Isaacli0520/msnmatch
228c6d546e16bd54dc8c7e0803f0f8c408cb0219
[ "MIT" ]
18
2020-03-11T18:57:27.000Z
2022-02-26T11:14:38.000Z
courses/migrations/0010_auto_20190717_0658.py
Isaacli0520/msnmatch
228c6d546e16bd54dc8c7e0803f0f8c408cb0219
[ "MIT" ]
null
null
null
# Generated by Django 2.1.5 on 2019-07-17 10:58 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('courses', '0009_course_type'), ] operations = [ migrations.RemoveField( model_name='instructor', name='name', ), migrations.AddField( model_name='instructor', name='first_name', field=models.CharField(default='Null', max_length=255), ), migrations.AddField( model_name='instructor', name='last_name', field=models.CharField(default='Null', max_length=255), ), ]
24.357143
67
0.565982
67
682
5.626866
0.58209
0.071618
0.151194
0.183024
0.466844
0.466844
0.249337
0.249337
0.249337
0
0
0.053419
0.313783
682
27
68
25.259259
0.752137
0.065982
0
0.47619
1
0
0.132283
0
0
0
0
0
0
1
0
false
0
0.047619
0
0.190476
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
f259f06549c6c1c82ab7656b19015d14cf07ab1d
1,139
py
Python
member/backends.py
sillygod/django-as-pure-api-server
40f9993b4e2eff99d3a55e21ad4f4ac1f0daff95
[ "MIT" ]
1
2020-02-19T09:03:01.000Z
2020-02-19T09:03:01.000Z
member/backends.py
sillygod/django-as-pure-api-server
40f9993b4e2eff99d3a55e21ad4f4ac1f0daff95
[ "MIT" ]
23
2017-07-15T08:06:21.000Z
2022-03-11T23:26:00.000Z
member/backends.py
sillygod/django-as-pure-api-server
40f9993b4e2eff99d3a55e21ad4f4ac1f0daff95
[ "MIT" ]
null
null
null
from django.contrib.auth import get_user_model from django.contrib.auth.backends import ModelBackend from member.models import SocialUserData User = get_user_model() class Backend(ModelBackend): def get_user(self, user_id): try: return User.objects.get(pk=user_id) except User.DoesNotExist: return None class EmailPasswordBackend(Backend): """Authentication with user's email and password """ def authenticate(self, request, username=None, password=None, **kwargs): try: user = User.objects.get(email=username) except User.DoesNotExist: return None if user.check_password(password): return user class SocialLoginBackend(Backend): """Authentication with social service id """ def authenticate(self, request, service=None, username=None, **kwargs): try: user_data = SocialUserData.objects.get(service=service, username=username) return user_data.user except SocialUserData.DoesNotExist: return None
27.119048
76
0.639157
120
1,139
5.983333
0.35
0.029248
0.091922
0.058496
0.089136
0
0
0
0
0
0
0
0.281826
1,139
41
77
27.780488
0.877751
0.077261
0
0.307692
0
0
0
0
0
0
0
0
0
1
0.115385
false
0.115385
0.115385
0
0.576923
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
2
f27366ba3befc5342ad0525cddd291928c83a199
1,724
py
Python
models/subnet.py
gnydick/qairon
e67af1f88ac6c614ae33adc4f42ab2ec3cc5b257
[ "MIT" ]
null
null
null
models/subnet.py
gnydick/qairon
e67af1f88ac6c614ae33adc4f42ab2ec3cc5b257
[ "MIT" ]
null
null
null
models/subnet.py
gnydick/qairon
e67af1f88ac6c614ae33adc4f42ab2ec3cc5b257
[ "MIT" ]
null
null
null
import sqlalchemy from sqlalchemy import * from sqlalchemy.dialects.postgresql import CIDR from sqlalchemy.orm import relationship, Session from models import Network from db import db import ipaddress as ip class Subnet(db.Model): __tablename__ = "subnet" id = Column(String, primary_key=True) network_id = Column(String, ForeignKey('network.id'), nullable=False) native_id = Column(String) name = Column(String, nullable=False) cidr = Column(CIDR, nullable=False, ) defaults = Column(Text) native_id = Column(String) network = relationship("Network", back_populates="subnets") fleets = relationship("Fleet", secondary='subnets_fleets', back_populates="subnets") def __repr__(self): return self.id def net(self): return ip.IPv4Network(address=self.cidr) class SubnetUnavailableError(RuntimeError): def __init__(self, message, errors): super().__init__(message) self.errors = errors @db.event.listens_for(Subnet, 'before_update') def my_before_update_listener(mapper, connection, subnet): __update_id__(subnet) # TODO this shouldn't be a rest call, refactor it' @db.event.listens_for(Subnet, 'before_insert') def my_before_insert_listener(mapper, connection, subnet): newsubnet = ip.IPv4Network(address=subnet.cidr) session = db.session network = session.query(Network).filter_by(id=subnet.network_id).first() if newsubnet in [ip.IPv4Network(subnet.cidr) for subnet in network.subnets if subnet.id is not None]: error = SubnetUnavailableError("Already Used", null) return error __update_id__(subnet) def __update_id__(subnet): subnet.id = subnet.network_id + ':' + subnet.name
28.262295
105
0.725638
217
1,724
5.529954
0.391705
0.04
0.046667
0.033333
0.048333
0.048333
0
0
0
0
0
0.002099
0.171114
1,724
60
106
28.733333
0.837649
0.027842
0
0.1
0
0
0.05675
0
0
0
0
0.016667
0
1
0.15
false
0
0.175
0.05
0.7
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
2
f27f33f7e2d2c8e984137e0bb0537f02bdd111ef
1,026
py
Python
eggs/SPARQLWrapper-1.6.4-py2.7.egg/SPARQLWrapper/SPARQLUtils.py
salayhin/talkofacta
8b5a14245dd467bb1fda75423074c4840bd69fb7
[ "MIT" ]
null
null
null
eggs/SPARQLWrapper-1.6.4-py2.7.egg/SPARQLWrapper/SPARQLUtils.py
salayhin/talkofacta
8b5a14245dd467bb1fda75423074c4840bd69fb7
[ "MIT" ]
null
null
null
eggs/SPARQLWrapper-1.6.4-py2.7.egg/SPARQLWrapper/SPARQLUtils.py
salayhin/talkofacta
8b5a14245dd467bb1fda75423074c4840bd69fb7
[ "MIT" ]
null
null
null
# -*- coding: utf8 -*- """ SPARQL Wrapper Utils @authors: U{Ivan Herman<http://www.ivan-herman.net>}, U{Sergio Fernández<http://www.wikier.org>}, U{Carlos Tejo Alonso<http://www.dayures.net>} @organization: U{World Wide Web Consortium<http://www.w3.org>} and U{Foundation CTIC<http://www.fundacionctic.org/>}. @license: U{W3C SOFTWARE NOTICE AND LICENSE<href="http://www.w3.org/Consortium/Legal/copyright-software">} """ import warnings def deprecated(func): """ This is a decorator which can be used to mark functions as deprecated. It will result in a warning being emmitted when the function is used. @see: http://code.activestate.com/recipes/391367/ """ def newFunc(*args, **kwargs): warnings.warn("Call to deprecated function %s." % func.__name__, category=DeprecationWarning, stacklevel=2) return func(*args, **kwargs) newFunc.__name__ = func.__name__ newFunc.__doc__ = func.__doc__ newFunc.__dict__.update(func.__dict__) return newFunc
34.2
143
0.691033
137
1,026
4.970803
0.620438
0.061674
0.026432
0.035242
0
0
0
0
0
0
0
0.012896
0.168616
1,026
29
144
35.37931
0.785463
0.587719
0
0
0
0
0.084011
0
0
0
0
0
0
1
0.222222
false
0
0.111111
0
0.555556
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
f28388b710b94642874df0ff47f2f4d1dc880ca1
1,154
py
Python
minilib/Servo.py
atikul99/minibot
7d15f1f39b36c14440ba13388e5657a210780201
[ "MIT" ]
1
2019-11-16T01:47:26.000Z
2019-11-16T01:47:26.000Z
minilib/Servo.py
a-abir/minibot
7d15f1f39b36c14440ba13388e5657a210780201
[ "MIT" ]
null
null
null
minilib/Servo.py
a-abir/minibot
7d15f1f39b36c14440ba13388e5657a210780201
[ "MIT" ]
null
null
null
from adafruit_crickit import crickit class Servo: Servos = [crickit.servo_1, crickit.servo_2, crickit.servo_3, crickit.servo_4] def __init__(self, ID): ''' Initialize a Servo :param ID: ID of the Servo [0,1,2,3] :type ID: int ''' self.servo = self.Servos[ID] def angle(self, degree): ''' Set the angle to rotate to :param degree: degree of the Servo :type degree: int ''' self.servo.angle = degree class ContiniousServo: ContiniousServos = [crickit.continuous_servo_1, crickit.continuous_servo_2, crickit.continuous_servo_3, crickit.continuous_servo_4] def __init__(self, ID): ''' Initialize a Continious Servo :param ID: ID of the Continious Servo [0,1,2,3] :type ID: int ''' self.servo = self.ContiniousServos[ID] def throttle(self, power): ''' Set the throttle of the Continious Servo :param power: Power of the Continious Servo -1 to 1 :type power: float ''' self.servo.throttle = power
25.644444
79
0.581456
142
1,154
4.577465
0.246479
0.038462
0.135385
0.092308
0.246154
0.246154
0.187692
0.187692
0.095385
0.095385
0
0.023196
0.327556
1,154
44
80
26.227273
0.814433
0.309359
0
0.133333
0
0
0
0
0
0
0
0
0
1
0.266667
false
0
0.066667
0
0.6
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
f2862de34482807061d1b8deb14718c443de37b3
703
py
Python
Rota_System/StandardTimes.py
ergoregion/Rota-Program
44dab4cb11add184619d88aa0fcab61532d128e6
[ "MIT" ]
null
null
null
Rota_System/StandardTimes.py
ergoregion/Rota-Program
44dab4cb11add184619d88aa0fcab61532d128e6
[ "MIT" ]
null
null
null
Rota_System/StandardTimes.py
ergoregion/Rota-Program
44dab4cb11add184619d88aa0fcab61532d128e6
[ "MIT" ]
null
null
null
__author__ = 'Neil Butcher' from datetime import time, date, datetime StandardEventTimes = [] StandardEventTimes.append(('noon', time(12, 00, 00))) StandardEventTimes.append(('midnight', time(00, 00, 00))) def time_string(a_time): for seTime in StandardEventTimes: if a_time == seTime[1]: return seTime[0] return a_time.strftime("%H:%M") def date_string(a_date): return a_date.strftime("%d. %B %Y") def get_time(a_string): for seTime in StandardEventTimes: if a_string == seTime[0]: return seTime[1] return (time.strptime(a_string, "%H:%M")).time() def get_date(a_string): return (datetime.strptime(a_string, "%d. %B %Y")).date()
24.241379
60
0.655761
98
703
4.520408
0.326531
0.079007
0.049661
0.130926
0.14447
0.14447
0
0
0
0
0
0.028219
0.193457
703
29
60
24.241379
0.753086
0
0
0.105263
0
0
0.073864
0
0
0
0
0
0
1
0.210526
false
0
0.052632
0.105263
0.578947
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
f2939742cefed71107c248d2bad7a561188a2e30
226
py
Python
Interfaces/__init__.py
StefanoFrazzetto/CrimeDetector
a5d39a3b44e8732502b9f6b41d44c8244940ebe6
[ "Apache-2.0" ]
2
2019-04-10T12:13:34.000Z
2020-10-02T05:23:58.000Z
Interfaces/__init__.py
StefanoFrazzetto/CrimeDetector
a5d39a3b44e8732502b9f6b41d44c8244940ebe6
[ "Apache-2.0" ]
null
null
null
Interfaces/__init__.py
StefanoFrazzetto/CrimeDetector
a5d39a3b44e8732502b9f6b41d44c8244940ebe6
[ "Apache-2.0" ]
null
null
null
from .Factorizable import Factorizable from .Serializable import Serializable from .Analyzable import AnalyzableLabel, Analyzable __all__ = [ 'Analyzable', 'AnalyzableLabel', 'Factorizable', 'Serializable', ]
20.545455
51
0.747788
18
226
9.166667
0.388889
0
0
0
0
0
0
0
0
0
0
0
0.168142
226
10
52
22.6
0.87766
0
0
0
0
0
0.216814
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
f2a7a473271488448e36adba1dacc137789a8198
1,625
py
Python
legal_advice_builder/urls.py
prototypefund/django-legal-advice-builder
081987d803f9ab38f8ac8dfc327f711dd48f0759
[ "MIT" ]
4
2021-07-22T10:16:49.000Z
2022-01-27T16:41:55.000Z
legal_advice_builder/urls.py
prototypefund/django-legal-advice-builder
081987d803f9ab38f8ac8dfc327f711dd48f0759
[ "MIT" ]
10
2021-08-29T11:37:17.000Z
2022-03-22T18:20:21.000Z
legal_advice_builder/urls.py
prototypefund/django-legal-advice-builder
081987d803f9ab38f8ac8dfc327f711dd48f0759
[ "MIT" ]
1
2022-02-14T09:41:34.000Z
2022-02-14T09:41:34.000Z
from django.urls import path from .admin_views import DocumentCreateView from .admin_views import DocumentFormView from .admin_views import DocumentPreviewView from .admin_views import LawCaseDelete from .admin_views import LawCaseEdit from .admin_views import LawCaseList from .admin_views import LawCasePreview from .admin_views import QuestionaireCreate from .admin_views import QuestionaireDeleteView from .admin_views import QuestionaireDetail from .admin_views import QuestionDelete from .admin_views import QuestionUpdate app_name = 'legal_advice_builder' urlpatterns = [ path('', LawCaseList.as_view(), name='law-case-list'), path('<int:pk>/edit/', LawCaseEdit.as_view(), name='law-case-edit'), path('<int:pk>/delete/', LawCaseDelete.as_view(), name='law-case-delete'), path('<int:pk>/preview/', LawCasePreview.as_view(), name='law-case-preview'), path('<int:pk>/document/create/', DocumentCreateView.as_view(), name='document-create'), path('<int:pk>/questionaire/create/', QuestionaireCreate.as_view(), name='questionaire-create'), path('questionaire/<int:pk>/', QuestionaireDetail.as_view(), name='questionaire-detail'), path('questionaire/<int:pk>/delete', QuestionaireDeleteView.as_view(), name='questionaire-delete'), path('question/<int:pk>/edit', QuestionUpdate.as_view(), name='question-update'), path('question/<int:pk>/delete', QuestionDelete.as_view(), name='question-delete'), path('document/<int:pk>/edit/', DocumentFormView.as_view(), name='document-update'), path('document/<int:pk>/preview/', DocumentPreviewView.as_view(), name='document-detail') ]
52.419355
103
0.757538
199
1,625
6.050251
0.20603
0.089701
0.139535
0.199336
0.056478
0
0
0
0
0
0
0
0.089231
1,625
30
104
54.166667
0.813514
0
0
0
0
0
0.28
0.122462
0
0
0
0
0
1
0
false
0
0.464286
0
0.464286
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
f2aa5a0a63f130bdfaedececa186d22d3490c5da
2,693
py
Python
recipe_blog/users/tests/test_models.py
hossshakiba/recipe-blog-api
755e4b4f10aa7b1918b260a5823900fb41bfff1b
[ "MIT" ]
2
2021-07-07T21:20:48.000Z
2021-08-17T04:36:41.000Z
recipe_blog/users/tests/test_models.py
hossshakiba/recipe-blog-api
755e4b4f10aa7b1918b260a5823900fb41bfff1b
[ "MIT" ]
null
null
null
recipe_blog/users/tests/test_models.py
hossshakiba/recipe-blog-api
755e4b4f10aa7b1918b260a5823900fb41bfff1b
[ "MIT" ]
null
null
null
from datetime import timedelta from django.test import TestCase from django.contrib.auth import get_user_model class ModelTest(TestCase): def test_create_user(self): """Test create user""" User = get_user_model() username = 'testuser' email = 'user@test.com' password = 'password' user = User.objects.create_user(username=username, email=email, password=password) self.assertEqual(user.username, username) self.assertEqual(user.email, email) self.assertTrue(user.check_password(password)) self.assertTrue(user.is_active) self.assertFalse(user.is_staff) self.assertFalse(user.is_superuser) def test_create_super_user(self): """Test create super user""" User = get_user_model() username = 'superuser' email = 'super@test.com' password = 'superduperpassword' user = User.objects.create_superuser(username=username, email=email, password=password) self.assertEqual(user.username, username) self.assertEqual(user.email, email) self.assertTrue(user.check_password(password)) self.assertTrue(user.is_active) self.assertTrue(user.is_staff) self.assertTrue(user.is_superuser) def test_create_user_email_normalized(self): """Test normalizing of user's email""" User = get_user_model() username = 'testuser' email = 'test@TEST.com' password = 'password' user = User.objects.create_user(username=username, email=email, password=password) self.assertEqual(user.email, email.lower()) # def test_new_user_email_required(self): # """Test email is a required field""" # with self.assertRaises(ValueError): # get_user_model().objects.create_user(username='test', email=None, password='password') def test_user_is_special(self): """Test if is_specail_member fuction works fine for a user""" User = get_user_model() username = 'testuser' email = 'test@test.com' password = 'password' user = User.objects.create_user(username=username, email=email, password=password) user.is_special += timedelta(days=3) self.assertTrue(user.is_special_member()) def test_super_user_is_special(self): """Test if is_specail_member 
fuction works fine for a superuser""" User = get_user_model() username = 'superuser' email = 'super@test.com' password = 'superduperpassword' user = User.objects.create_superuser(username=username, email=email, password=password) self.assertTrue(user.is_special_member())
39.602941
100
0.6684
316
2,693
5.515823
0.177215
0.100975
0.082616
0.068847
0.70568
0.699943
0.629948
0.629948
0.604131
0.604131
0
0.000479
0.225399
2,693
67
101
40.19403
0.835091
0.150761
0
0.64
0
0
0.074845
0
0
0
0
0
0.3
1
0.1
false
0.24
0.06
0
0.18
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
f2ab5de122c16eb0e77960a633d21d6aa9037fa6
721
py
Python
test.py
threefoldtech/JumpscaleX
5fb073a82aeb0e66fc7d9660c45a1e31bc094bfa
[ "Apache-2.0" ]
2
2019-05-09T07:21:25.000Z
2019-08-05T06:37:53.000Z
test.py
threefoldtech/JumpscaleX
5fb073a82aeb0e66fc7d9660c45a1e31bc094bfa
[ "Apache-2.0" ]
664
2018-12-19T12:43:44.000Z
2019-08-23T04:24:42.000Z
test.py
threefoldtech/jumpscale10
5fb073a82aeb0e66fc7d9660c45a1e31bc094bfa
[ "Apache-2.0" ]
7
2019-05-03T07:14:37.000Z
2019-08-05T12:36:52.000Z
from Jumpscale import j import traceback import sys def schema(): j.data.schema.test() j.data.types.test() def bcdb(): j.tools.tmux.kill() assert len(j.tools.tmux.server.sessions) == 1 # j.servers.zdb.test(build=True) # j.clients.zdb.test() j.data.bcdb.test() def servers(): j.tools.tmux.kill() if j.core.platformtype.myplatform.isUbuntu: j.builders.web.traefik.install() # j.builders.db.etcd.install() j.builders.network.coredns.install() def ssh(): # j.clients.sshagent.test() #should not do, because in container there will be no ssh-key loaded any more to continue the tests j.clients.sshkey.test() # schema() bcdb() # ssh() # servers()
19.486486
132
0.653259
106
721
4.443396
0.556604
0.031847
0.063694
0.059448
0
0
0
0
0
0
0
0.00173
0.198336
721
36
133
20.027778
0.813149
0.320388
0
0.111111
0
0
0
0
0
0
0
0
0.055556
1
0.222222
true
0
0.166667
0
0.388889
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
0
0
0
2
f2b5468b75dfceddb72ec8353c572bbe7513894a
158
py
Python
aiocloudflare/api/user/load_balancers/preview/preview.py
Stewart86/aioCloudflare
341c0941f8f888a8b7e696e64550bce5da4949e6
[ "MIT" ]
2
2021-09-14T13:20:55.000Z
2022-02-24T14:18:24.000Z
aiocloudflare/api/user/load_balancers/preview/preview.py
Stewart86/aioCloudflare
341c0941f8f888a8b7e696e64550bce5da4949e6
[ "MIT" ]
46
2021-09-08T08:39:45.000Z
2022-03-29T12:31:05.000Z
aiocloudflare/api/user/load_balancers/preview/preview.py
Stewart86/aioCloudflare
341c0941f8f888a8b7e696e64550bce5da4949e6
[ "MIT" ]
1
2021-12-30T23:02:23.000Z
2021-12-30T23:02:23.000Z
from aiocloudflare.commons.auth import Auth class Preview(Auth): _endpoint1 = "user/load_balancers/preview" _endpoint2 = None _endpoint3 = None
19.75
46
0.740506
18
158
6.277778
0.777778
0
0
0
0
0
0
0
0
0
0
0.023256
0.183544
158
7
47
22.571429
0.852713
0
0
0
0
0
0.170886
0.170886
0
0
0
0
0
1
0
false
0
0.2
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
f2bb007d73bc265357ffff25a387b8f6f687524c
234
py
Python
utils/security/lookup.py
pressol/utils
9ee8789321042a2c674c5483a44032c13d392c64
[ "MIT" ]
null
null
null
utils/security/lookup.py
pressol/utils
9ee8789321042a2c674c5483a44032c13d392c64
[ "MIT" ]
3
2021-04-20T23:06:51.000Z
2022-01-19T20:17:41.000Z
utils/security/lookup.py
pressol/utils
9ee8789321042a2c674c5483a44032c13d392c64
[ "MIT" ]
1
2022-01-19T20:17:20.000Z
2022-01-19T20:17:20.000Z
def inlookuptable(value: str, lookuptable: str): with open(lookuptable, "r") as lookuptablestream: for table in lookuptablestream: if table.strip("\n") == value: return True return False
23.4
53
0.611111
25
234
5.72
0.76
0
0
0
0
0
0
0
0
0
0
0
0.294872
234
9
54
26
0.866667
0
0
0
0
0
0.012987
0
0
0
0
0
0
1
0.166667
false
0
0
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
f2c59b13f76e051d9e60af0c1bee23d35c0c4f25
288
py
Python
stock/serializers.py
mrivadeneira/fullapp_project
8d91793262b351d99fd8db2cb187650aa90dddb5
[ "Unlicense" ]
null
null
null
stock/serializers.py
mrivadeneira/fullapp_project
8d91793262b351d99fd8db2cb187650aa90dddb5
[ "Unlicense" ]
null
null
null
stock/serializers.py
mrivadeneira/fullapp_project
8d91793262b351d99fd8db2cb187650aa90dddb5
[ "Unlicense" ]
null
null
null
from rest_framework import serializers from .models import Stock class StockSerializer(serializers.ModelSerializer): class Meta: model = Stock fields = ('pk', 'sku', 'category', 'stock_type', 'color', 'size', 'movement', 'movement_date', 'movement_registration')
26.181818
127
0.697917
30
288
6.566667
0.733333
0
0
0
0
0
0
0
0
0
0
0
0.177083
288
10
128
28.8
0.831224
0
0
0
0
0
0.258741
0.073427
0
0
0
0
0
1
0
false
0
0.333333
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
4b81c5470c931cdb6f215b44f2d45522ded37b39
1,171
py
Python
parse_recipes.py
meooow25/Emoji2recipe
802c4556c66cccd61814e6b169f0eecbc3646ec1
[ "MIT" ]
5
2019-11-19T22:40:28.000Z
2021-11-29T00:34:52.000Z
parse_recipes.py
meooow25/Emoji2recipe
802c4556c66cccd61814e6b169f0eecbc3646ec1
[ "MIT" ]
1
2018-10-30T22:19:13.000Z
2018-10-30T22:19:13.000Z
parse_recipes.py
meooow25/Emoji2recipe
802c4556c66cccd61814e6b169f0eecbc3646ec1
[ "MIT" ]
9
2019-05-10T21:17:55.000Z
2021-09-13T12:51:26.000Z
import os from os import path from glob import glob import json import re import pickle import argparse import numpy as np from scipy import ndimage, misc from config import cfg from parse_ingredients import preprocess_ingredients def load_recipe(filename): """Load a single recipe file """ with open(filename, 'r') as f: recipes = json.load(f) print('Loaded {:,} recipes from {}'.format(len(recipes), filename)) return recipes def clean_recipe_ingredients(recipes): """Clean and parse recipe ingedients """ recipes_clean = {} for key, value in recipes.items(): if "ingredients" not in value.keys(): continue value['ingredients_clean'] = preprocess_ingredients(value['ingredients']) recipes_clean[key] = value return recipes_clean def load_recipes(): """Load all raw recipes and combine to single dataset (json format) """ recipes = {} print(path.join(cfg.DATA.RAW_DATA_DIR, 'recipes_raw*.json')) for filename in glob(path.join(cfg.DATA.RAW_DATA_DIR, 'recipes_raw*.json')): print (filename) recipes.update(load_recipe(filename)) print('Loaded {:,} recipes in total'.format(len(recipes))) return clean_recipe_ingredients(recipes)
26.613636
77
0.744663
165
1,171
5.163636
0.357576
0.056338
0.042254
0.068075
0.091549
0.091549
0.091549
0.091549
0.091549
0.091549
0
0
0.140905
1,171
43
78
27.232558
0.846918
0.121264
0
0
0
0
0.125854
0
0
0
0
0
0
1
0.09375
false
0
0.34375
0
0.53125
0.125
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
4b9d7d79c1cb82592cc879134aa07ea753fcad4f
232
py
Python
Modulos_Python/quebrandoNumero.py
miguelsrrobo/JS-Public_
1f4f661fdf2d3c157a21f0cc91f125f88f066759
[ "MIT" ]
null
null
null
Modulos_Python/quebrandoNumero.py
miguelsrrobo/JS-Public_
1f4f661fdf2d3c157a21f0cc91f125f88f066759
[ "MIT" ]
null
null
null
Modulos_Python/quebrandoNumero.py
miguelsrrobo/JS-Public_
1f4f661fdf2d3c157a21f0cc91f125f88f066759
[ "MIT" ]
null
null
null
from math import trunc

# Ask the user for a number and display its truncated (integer) portion.
numero = float(input('Digite um numero inteiro: '))
parte_inteira = trunc(numero)
print('O valor digitado foi {} e a sua porção inteira é {}'.format(numero, parte_inteira))

# NOTE: int(numero) also works to extract the integer part.
25.777778
90
0.702586
34
232
4.794118
0.794118
0.134969
0
0
0
0
0
0
0
0
0
0
0.159483
232
8
91
29
0.835897
0
0
0
0
0
0.455621
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0.333333
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
4b9f629a6f879d552cb2bd08d2f5119fb408e6ae
728
py
Python
conditionals/exam_grade.py
fa-alvarez/python-examples
d49c86ecc97ae2802b2470240170f2144b2f1d64
[ "Unlicense" ]
null
null
null
conditionals/exam_grade.py
fa-alvarez/python-examples
d49c86ecc97ae2802b2470240170f2144b2f1d64
[ "Unlicense" ]
null
null
null
conditionals/exam_grade.py
fa-alvarez/python-examples
d49c86ecc97ae2802b2470240170f2144b2f1d64
[ "Unlicense" ]
null
null
null
#!/usr/bin/env python3


def exam_grade(score):
    """Grade an exam score as Pass/Fail/Top Score.

    Students in a class receive their grades as Pass/Fail. Scores of 60
    or more (out of 100) mean that the grade is "Pass". For lower scores,
    the grade is "Fail". In addition, scores above 95 (not included) are
    graded as "Top Score".

    Args:
        score: Numeric exam score, expected in the range 0-100.

    Returns:
        str: "Top Score", "Pass", or "Fail".
    """
    # Bug fix: the original tested `score == 100`, but the stated contract
    # is that every score strictly above 95 is a Top Score — 96-99 were
    # wrongly graded "Pass".
    if score > 95:
        grade = "Top Score"
    elif score >= 60:
        grade = "Pass"
    else:
        grade = "Fail"
    return grade


print(help(exam_grade))
print(exam_grade(65))   # Should be Pass
print(exam_grade(55))   # Should be Fail
print(exam_grade(60))   # Should be Pass
print(exam_grade(95))   # Should be Pass
print(exam_grade(100))  # Should be Top Score
print(exam_grade(0))    # Should be Fail
31.652174
79
0.715659
124
728
4.137097
0.451613
0.140351
0.163743
0.099415
0.152047
0.152047
0
0
0
0
0
0.041459
0.171703
728
22
80
33.090909
0.809287
0.561813
0
0
0
0
0.055921
0
0
0
0
0
0
1
0.066667
false
0.066667
0
0
0.133333
0.466667
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
1
0
2
4baa76299d69e0982250e3c80ec495c12a1f244b
1,375
py
Python
jupiter/domain/big_plans/infra/big_plan_repository.py
horia141/jupiter
2c721d1d44e1cd2607ad9936e54a20ea254741dc
[ "MIT" ]
15
2019-05-05T14:34:58.000Z
2022-02-25T09:57:28.000Z
jupiter/domain/big_plans/infra/big_plan_repository.py
horia141/jupiter
2c721d1d44e1cd2607ad9936e54a20ea254741dc
[ "MIT" ]
3
2020-02-22T16:09:39.000Z
2021-12-18T21:33:06.000Z
jupiter/domain/big_plans/infra/big_plan_repository.py
horia141/jupiter
2c721d1d44e1cd2607ad9936e54a20ea254741dc
[ "MIT" ]
null
null
null
"""A repository of big plans.""" import abc from typing import Optional, Iterable from jupiter.domain.big_plans.big_plan import BigPlan from jupiter.domain.big_plans.big_plan_collection import BigPlanCollection from jupiter.framework.base.entity_id import EntityId from jupiter.framework.storage import Repository class BigPlanNotFoundError(Exception): """Error raised when a big plan was not found.""" class BigPlanRepository(Repository, abc.ABC): """A repository of big plans.""" @abc.abstractmethod def create(self, big_plan_collection: BigPlanCollection, big_plan: BigPlan) -> BigPlan: """Create a big plan.""" @abc.abstractmethod def save(self, big_plan: BigPlan) -> BigPlan: """Save a big plan - it should already exist.""" @abc.abstractmethod def load_by_id(self, ref_id: EntityId, allow_archived: bool = False) -> BigPlan: """Load a big plan by id.""" @abc.abstractmethod def find_all( self, allow_archived: bool = False, filter_ref_ids: Optional[Iterable[EntityId]] = None, filter_big_plan_collection_ref_ids: Optional[Iterable[EntityId]] = None) -> Iterable[BigPlan]: """Find all big plans.""" @abc.abstractmethod def remove(self, ref_id: EntityId) -> BigPlan: """Hard remove a big plan - an irreversible operation."""
33.536585
106
0.693091
172
1,375
5.401163
0.343023
0.082885
0.043057
0.034446
0.238967
0.142088
0.068891
0
0
0
0
0
0.201455
1,375
40
107
34.375
0.846084
0.184727
0
0.227273
0
0
0
0
0
0
0
0
0
1
0.227273
false
0
0.272727
0
0.590909
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
4bb14289b061ddb38728ad523069796f3a19a38b
313
py
Python
demos/check_hdf5_hash.py
bengranett/catstore
a61f922fdef4d5f5189c8d9cad830bd8dd981900
[ "MIT" ]
null
null
null
demos/check_hdf5_hash.py
bengranett/catstore
a61f922fdef4d5f5189c8d9cad830bd8dd981900
[ "MIT" ]
null
null
null
demos/check_hdf5_hash.py
bengranett/catstore
a61f922fdef4d5f5189c8d9cad830bd8dd981900
[ "MIT" ]
1
2019-10-28T10:54:13.000Z
2019-10-28T10:54:13.000Z
import sys

import pypelid.utils.filetools as ft

# For each HDF5 file named on the command line, verify its stored checksum
# and report pass/fail.  Converted from Python 2 print statements, which are
# a syntax error under Python 3.
for filename in sys.argv[1:]:
    check, hashes = ft.check_hdf5_hash(filename)
    if check:
        print("%s: %s checksum passed :D" % (filename, hashes[0]))
    else:
        print("%s: checksum failed :( (read:%s computed:%s)" % (filename, hashes[0], hashes[1]))
24.076923
93
0.642173
46
313
4.326087
0.586957
0.060302
0.150754
0
0
0
0
0
0
0
0
0.020161
0.207668
313
12
94
26.083333
0.782258
0
0
0
0
0
0.220447
0
0
0
0
0
0
0
null
null
0.125
0.25
null
null
0.25
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
2
4bc00a227cefe01ad00f60e2ad8f15b3a7586721
795
py
Python
sistemas/models.py
luisfarfan/django_intranetapp
bb7e0c08706d8fdf9b1eae437baa1f5850dbd0bd
[ "Apache-2.0" ]
null
null
null
sistemas/models.py
luisfarfan/django_intranetapp
bb7e0c08706d8fdf9b1eae437baa1f5850dbd0bd
[ "Apache-2.0" ]
null
null
null
sistemas/models.py
luisfarfan/django_intranetapp
bb7e0c08706d8fdf9b1eae437baa1f5850dbd0bd
[ "Apache-2.0" ]
null
null
null
from django.db import models
from django.contrib import admin


class Sistema(models.Model):
    """A system record with audit (creation/edition) metadata."""

    nombre = models.CharField(max_length=100)
    descripcion = models.TextField()
    codigo = models.CharField(max_length=8)
    usr_creacion = models.CharField(max_length=100, blank=True, null=True)
    fec_creacion = models.DateTimeField(blank=True, null=True)
    usr_edicion = models.CharField(max_length=100, blank=True, null=True)
    fec_edicion = models.DateTimeField(blank=True, null=True)
    # 1 = active; other values presumably mark inactive — TODO confirm.
    estado = models.IntegerField(default=1)

    def __unicode__(self):
        return '%s , %s' % (self.codigo, self.nombre)

    # Fix: under Python 3, Django uses __str__ for display; with only
    # __unicode__ defined, instances rendered as "Sistema object".
    # Aliasing keeps Python 2 behavior intact.
    __str__ = __unicode__

    class Meta:
        managed = True
        db_table = 'SISTEMA'


@admin.register(Sistema)
class ProyectoAdmin(admin.ModelAdmin):
    list_display = ('codigo', 'nombre')
30.576923
74
0.710692
100
795
5.51
0.44
0.108893
0.130672
0.174229
0.350272
0.30127
0.170599
0.170599
0.170599
0.170599
0
0.016743
0.173585
795
25
75
31.8
0.821918
0
0
0
0
0
0.032704
0
0
0
0
0
0
1
0.052632
false
0
0.105263
0.052632
0.842105
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
299ac5adc076c1fcf2728cd7fb5c4c604e8348aa
1,836
py
Python
src/schnetpack/md/simulation_hooks/basic_hooks.py
sxie22/schnetpack
a421e7c121c7bdb2838fb30f887812110ecfa3c6
[ "MIT" ]
null
null
null
src/schnetpack/md/simulation_hooks/basic_hooks.py
sxie22/schnetpack
a421e7c121c7bdb2838fb30f887812110ecfa3c6
[ "MIT" ]
null
null
null
src/schnetpack/md/simulation_hooks/basic_hooks.py
sxie22/schnetpack
a421e7c121c7bdb2838fb30f887812110ecfa3c6
[ "MIT" ]
1
2022-02-10T17:39:11.000Z
2022-02-10T17:39:11.000Z
from __future__ import annotations

import torch.nn as nn

from schnetpack.md.utils import UninitializedMixin

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Imported only for static typing to avoid a runtime circular import.
    from schnetpack.md import Simulator

__all__ = ["RemoveCOMMotion", "SimulationHook"]


class SimulationHook(UninitializedMixin, nn.Module):
    """
    Basic class for simulator hooks.

    Every callback below is a no-op, so a subclass only overrides the
    simulation events it cares about.
    """

    def on_step_begin(self, simulator: Simulator):
        # Called at the start of every simulation step.
        pass

    def on_step_middle(self, simulator: Simulator):
        # Called midway through a simulation step.
        pass

    def on_step_end(self, simulator: Simulator):
        # Called at the end of every simulation step.
        pass

    def on_step_finalize(self, simulator: Simulator):
        # Called when a step has been fully finalized.
        pass

    def on_step_failed(self, simulator: Simulator):
        # Called when a simulation step fails.
        pass

    def on_simulation_start(self, simulator: Simulator):
        # Called once before the simulation begins.
        pass

    def on_simulation_end(self, simulator: Simulator):
        # Called once after the simulation finishes.
        pass


class RemoveCOMMotion(SimulationHook):
    """
    Periodically remove motions of the center of mass from the system.

    Args:
        every_n_steps (int): Frequency with which motions are removed.
        remove_rotation (bool): Also remove rotations.
        wrap_positions: Wrap atom positions back to box in periodic
            simulations.
    """

    def __init__(self, every_n_steps: int, remove_rotation: bool, wrap_positions: bool):
        super(RemoveCOMMotion, self).__init__()
        self.every_n_steps = every_n_steps
        self.remove_rotation = remove_rotation
        self.wrap_positions = wrap_positions

    def on_step_finalize(self, simulator: Simulator):
        # Only act every `every_n_steps` steps (including step 0).
        if simulator.step % self.every_n_steps == 0:
            simulator.system.remove_center_of_mass()
            simulator.system.remove_translation()

            if self.remove_rotation:
                simulator.system.remove_com_rotation()

            if self.wrap_positions:
                simulator.system.wrap_positions()
27.402985
88
0.691176
215
1,836
5.618605
0.334884
0.033113
0.145695
0.150662
0.280629
0.222682
0.222682
0
0
0
0
0.000714
0.236928
1,836
66
89
27.818182
0.861527
0.163399
0
0.25
0
0
0.019476
0
0
0
0
0
0
1
0.25
false
0.194444
0.138889
0
0.444444
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
2
29a679fe472885838a85805ec387c66c860b0f41
189
py
Python
telegram/bot.py
Fawers/horriblesubs-hourly-notifier
b8c610d9fdd6494976e00e5a7fad746013669280
[ "MIT" ]
10
2019-01-14T10:24:53.000Z
2022-03-29T02:39:09.000Z
telegram/bot.py
Fawers/horriblesubs-hourly-notifier
b8c610d9fdd6494976e00e5a7fad746013669280
[ "MIT" ]
null
null
null
telegram/bot.py
Fawers/horriblesubs-hourly-notifier
b8c610d9fdd6494976e00e5a7fad746013669280
[ "MIT" ]
null
null
null
import os

import telepot

# Target channel and bot credentials come from the environment.
CHANNEL = os.getenv('TGCHANNEL')
BOT = telepot.Bot(os.getenv('TGBOT'))


def send_to_channel(message):
    """Post *message* to the configured Telegram channel, parsed as HTML."""
    BOT.sendMessage(CHANNEL, message, parse_mode='html')
14.538462
56
0.730159
26
189
5.192308
0.615385
0.118519
0
0
0
0
0
0
0
0
0
0
0.132275
189
12
57
15.75
0.823171
0
0
0
0
0
0.095238
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
29adaaa6f06669c1750dfd09400ea03289d45b62
933
py
Python
iridium/libs/trackers/tracker.py
Toure/Rhea
fda0e4cd7c568943725245393bfe762bc858e917
[ "Apache-2.0" ]
1
2015-08-19T15:55:46.000Z
2015-08-19T15:55:46.000Z
iridium/libs/trackers/tracker.py
Toure/Rhea
fda0e4cd7c568943725245393bfe762bc858e917
[ "Apache-2.0" ]
null
null
null
iridium/libs/trackers/tracker.py
Toure/Rhea
fda0e4cd7c568943725245393bfe762bc858e917
[ "Apache-2.0" ]
null
null
null
__author__ = "Toure Dunnon" __license__ = "Apache License 2.0" __version__ = "0.1" __email__ = "toure@redhat.com" __status__ = "Alpha" import abc from importlib import import_module class TrackerBase(object): @staticmethod def import_mod(platform_name): """ Import mod will return an initialized import path from the specified module name. :param module_name: tracker module name of interest (str). :return: import object of requested module name. """ return import_module("iridium.libs.trackers.%s" % platform_name) class Tracker(object): __metaclass__ = abc.ABCMeta @abc.abstractmethod def create_case(self): pass @abc.abstractmethod def update_case(self, bug_id, updates): pass @abc.abstractmethod def update_flag(self, id, flags_status): pass @abc.abstractmethod def update_comment(self): pass
22.214286
89
0.676313
111
933
5.36036
0.522523
0.067227
0.134454
0.121008
0.151261
0
0
0
0
0
0
0.005634
0.239014
933
41
90
22.756098
0.832394
0.203644
0
0.32
0
0
0.111111
0.034188
0
0
0
0
0
1
0.2
false
0.16
0.16
0
0.52
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
2
29b3b2c587f760ce7808219daad6f2cd6fda1e05
953
py
Python
powerwallrl/powerplans/australia/wa/synergy.py
danielwoz/powerwall-rl
e27d57ed544b2523bd6e125b4d116a89845de734
[ "MIT" ]
null
null
null
powerwallrl/powerplans/australia/wa/synergy.py
danielwoz/powerwall-rl
e27d57ed544b2523bd6e125b4d116a89845de734
[ "MIT" ]
null
null
null
powerwallrl/powerplans/australia/wa/synergy.py
danielwoz/powerwall-rl
e27d57ed544b2523bd6e125b4d116a89845de734
[ "MIT" ]
null
null
null
""" Synergy grid and feedback costs. """ # Author: Daniel Williams __version__ = '0.0.1' from powerwallrl.powerplans.australia.wa import WesternAustralia class Synergy(WesternAustralia): pass class Rebs(Synergy): def feedback(self, _): return 7.1350 class Debs(Synergy): def feedback(self, dt): if dt.hour >= 15 and dt.hour < 21: return 10.0 return 2.75 class SmartHome(Synergy): def usage(self, dt): # Every day offpeak if (dt.hour > 21 or dt.hour < 7): return 15.3645 # Weekend Shoulder if (dt.weekday() == 5 or dt.weekday() == 6): return 29.2100 # Weekday Shoulder if (dt.hour > 7 and dt.hour < 15): return 29.2100 # Weekday Peak return 55.7734 class A1(Synergy): def usage(self, _): return 29.3273 class A1_Debs(A1, Debs): pass class A1_Rebs(A1, Debs): pass class SmartHome_Debs(SmartHome, Debs): pass class SmartHome_Rebs(SmartHome, Debs): pass
18.686275
64
0.653725
138
953
4.442029
0.391304
0.058728
0.039152
0.071778
0
0
0
0
0
0
0
0.083333
0.231899
953
50
65
19.06
0.754098
0.129066
0
0.21875
0
0
0.006112
0
0
0
0
0
0
1
0.125
false
0.15625
0.03125
0.0625
0.6875
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
2
29c22902e289fffef380e013c5cec077ff27720a
266
py
Python
tests/data/cli_args.py
festeh/docker-pretty-ps
582c2d811bc8f79fa66b25d1ac37ec27e871921f
[ "MIT" ]
241
2019-01-19T13:40:41.000Z
2022-03-25T20:15:56.000Z
tests/data/cli_args.py
festeh/docker-pretty-ps
582c2d811bc8f79fa66b25d1ac37ec27e871921f
[ "MIT" ]
13
2018-11-24T22:14:20.000Z
2022-03-01T14:37:49.000Z
tests/data/cli_args.py
festeh/docker-pretty-ps
582c2d811bc8f79fa66b25d1ac37ec27e871921f
[ "MIT" ]
20
2019-01-19T17:42:54.000Z
2022-02-23T12:16:04.000Z
class CliArgs(object):
    """Stand-in for parsed command-line arguments, used as test data.

    Provides an empty search list and every boolean flag switched off,
    mirroring the attributes the real argument parser produces.
    """

    # All boolean options, defaulted to False in __init__.
    _FLAGS = ("all", "slim", "include", "order", "reverse", "json", "version")

    def __init__(self):
        self.search = []
        for flag in self._FLAGS:
            setattr(self, flag, False)
19
28
0.537594
29
266
4.793103
0.517241
0.388489
0
0
0
0
0
0
0
0
0
0
0.37218
266
13
29
20.461538
0.832335
0
0
0
0
0
0
0
0
0
0
0
0
1
0.1
false
0
0
0
0.2
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2