hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
31cbd70691028fefc6fce482f7c57b4460593cf1 | 1,875 | py | Python | {{cookiecutter.project_name}}/{{cookiecutter.project_slug}}/views.py | gbozee/api-scaffold | 6e4f75f0fe26e4f567158ce2d8dde0abbd6b1063 | [
"MIT"
] | null | null | null | {{cookiecutter.project_name}}/{{cookiecutter.project_slug}}/views.py | gbozee/api-scaffold | 6e4f75f0fe26e4f567158ce2d8dde0abbd6b1063 | [
"MIT"
] | null | null | null | {{cookiecutter.project_name}}/{{cookiecutter.project_slug}}/views.py | gbozee/api-scaffold | 6e4f75f0fe26e4f567158ce2d8dde0abbd6b1063 | [
"MIT"
] | null | null | null | import importlib
import typing
import databases
from starlette.applications import Starlette
from starlette.authentication import requires
from starlette.background import BackgroundTasks
from starlette.endpoints import HTTPEndpoint
from starlette.exceptions import HTTPException
from starlette.routing import Route
from starlette.requests import Request
from starlette.responses import (JSONResponse, PlainTextResponse,
RedirectResponse)
import sstarlette
from {{cookiecutter.project_slug}} import (models, service_layer, settings,
utils)
async def not_authorized(request, exc):
return JSONResponse(
{"status": False, "msg": "Not Authorized"}, status_code=exc.status_code
)
class CustomStarlette(sstarlette.SStarlette):
def __init__(self, *args, **kwargs):
middlewares = self.populate_middlewares(
{% if cookiecutter.auth_backend == 'y' -%}service_layer.verify_access_token,{% endif %}sentry_dsn=settings.SENTRY_DSN,
debug=settings.DEBUG,
)
super().__init__(
str(settings.DATABASE_URL),
middleware=middlewares,
debug=settings.DEBUG,
replica_database_url=settings.REPLICA_DATABASE_URL,
serverless=settings.ENVIRONMENT == "serverless",
model_initializer=models.init_tables,
exception_handlers={403: not_authorized},
**kwargs,
)
async def initialize_redis(self):
self.redis = await utils.redis_connection()
return self.redis
class Homepage(HTTPEndpoint):
async def get(self, request):
return PlainTextResponse(f"Hello, world!")
app = CustomStarlette(service_layer=service_layer.service, routes=[Route("/",HomePage,methods=['GET'])])
| 34.722222 | 131 | 0.6704 | 182 | 1,875 | 6.725275 | 0.478022 | 0.084967 | 0.029412 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00212 | 0.245333 | 1,875 | 53 | 132 | 35.377358 | 0.862898 | 0 | 0 | 0.047619 | 0 | 0 | 0.028053 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.309524 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
31cbf3a1b8e886162e093974ff14e4266ec99e39 | 762 | py | Python | migration/lib/metadata_view.py | HumanCellAtlas/migration-service | 5f1a6bad11856d745e558b4eed31f4c6374b29a6 | [
"MIT"
] | null | null | null | migration/lib/metadata_view.py | HumanCellAtlas/migration-service | 5f1a6bad11856d745e558b4eed31f4c6374b29a6 | [
"MIT"
] | null | null | null | migration/lib/metadata_view.py | HumanCellAtlas/migration-service | 5f1a6bad11856d745e558b4eed31f4c6374b29a6 | [
"MIT"
] | null | null | null | import json
from .schema_reference import SchemaReference
class MetadataView(dict):
"""
A view representing the data that exists in a metadata file. The contents are stored as a dictionary with a
specific pointer to the metadata schema that was responsible for generating the view of the data as it is currently.
"""
def __init__(self, **kwargs):
super().__init__(**kwargs)
@staticmethod
def from_file(path_to_metadata_json_file: str):
with open(path_to_metadata_json_file, 'r') as metadata_file:
metadata_dict = json.load(metadata_file)
return MetadataView(**metadata_dict)
@property
def schema_reference(self) -> SchemaReference:
return SchemaReference(self['describedBy'])
| 31.75 | 120 | 0.715223 | 98 | 762 | 5.326531 | 0.510204 | 0.068966 | 0.05364 | 0.068966 | 0.084291 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.211286 | 762 | 23 | 121 | 33.130435 | 0.868552 | 0.293963 | 0 | 0 | 0 | 0 | 0.023166 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.230769 | false | 0 | 0.153846 | 0.076923 | 0.615385 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
31d3d8098288c380e60e1c2a1a19be2d8f4cf5f1 | 8,925 | py | Python | mpd_dataset_generator.py | richardanarfi/Recsys-Challenge-2018-TeamFL | 81e00a2417d530ea1033dcb22fbe29b7ceb12bb2 | [
"Apache-2.0"
] | null | null | null | mpd_dataset_generator.py | richardanarfi/Recsys-Challenge-2018-TeamFL | 81e00a2417d530ea1033dcb22fbe29b7ceb12bb2 | [
"Apache-2.0"
] | null | null | null | mpd_dataset_generator.py | richardanarfi/Recsys-Challenge-2018-TeamFL | 81e00a2417d530ea1033dcb22fbe29b7ceb12bb2 | [
"Apache-2.0"
] | null | null | null | import sys
import json
import string
import datetime
import os
output_file = open("mpd_dataset.txt", "w", encoding='raw_unicode_escape')
#uniquePlaylists = open("challenge_playlists_unique.txt", "w", encoding='raw_unicode_escape')
#uniqueTracks = open("challenge_tracks_unique.txt", "w", encoding='raw_unicode_escape')
optional_playlist_fields = ["name"]
min_tracks_per_playlist = 5
max_tracks_per_playlist = 250
min_artists_per_playlist = 3
min_albums_per_playlist = 2
max_files_for_quick_processing = 10
#latest_add_ts = int(datetime.datetime(2017, 11, 1).strftime('%s')) * 1000
pids = set()
unique_playlists = set()
unique_tracks = set()
#inputFile = open('mpd.txt')
artist_names = {}
album_names = {}
track_names = {}
gstats = {
'errors': 0
}
def process_mpd(path):
count = 0
#check_challenge_set()
filenames = os.listdir(path)
for filename in sorted(filenames):
if filename.startswith("mpd.slice.") and filename.endswith(".json"):
fullpath = os.sep.join((path, filename))
f = open(fullpath)
js = f.read()
f.close()
slice = json.loads(js)
process_info(slice['info'])
for playlist in slice['playlists']:
#count += 1
#print(count)
#unique_playlists.add(playlist['name'].replace('+', '').replace('~', '').replace('<', '').replace('>', '').replace('=', '').replace('?', '').replace(':', '').replace(';', '').replace('\"', '').replace('\'', '').replace('.', '').replace('_', '').replace('#', '').replace('!', '').replace('@', '').replace('-', '').replace('/', '').replace('&', '').replace('$', '').replace('{', '').replace('}', '').replace('(', '').replace(')', '').replace('^', '').replace('%', '').replace('*', '').replace(',', '').replace(' ', ''))
#print(playlist.items())
for p_name, val in playlist.items():
if p_name in optional_playlist_fields:
#print(playlist['name'])
#val = playlist['name']
#print(val)
#output_file.write('#')
output_file.write(playlist['name'].replace('+', '').replace('~', '').replace('<', '').replace('>', '').replace('=', '').replace('?', '').replace(':', '').replace(';', '').replace('\"', '').replace('\'', '').replace('.', '').replace('_', '').replace('#', '').replace('!', '').replace('@', '').replace('-', '').replace('/', '').replace('&', '').replace('$', '').replace('{', '').replace('}', '').replace('(', '').replace(')', '').replace('^', '').replace('%', '').replace('*', '').replace(',', '').replace(' ', ''))
#output_file.write('#')
output_file.write(',')
#n_tracks = playlist['num_samples']
for track in playlist['tracks']:
track_uri = track['track_uri']
#unique_tracks.add(track['track_uri'])
#uniqueTracks.write(track_uri)
#uniqueTracks.write('\n')
output_file.write(track_uri)
output_file.write(',')
output_file.write('\n')
#for playlist in slice['playlists']:
#for pid in playlist['tracks']:
# print(playlist['pid'])
#print(playlist)
process_playlist(playlist)
#print(count)
count += 1
print(count)
if quick and count > max_files_for_quick_processing:
break
show_summary()
#check_challenge_set()
def show_summary():
tassert(len(pids) == 1000000, "mismatched pids %d %d", len(pids), 1000000)
missing = set()
for pid in range(0, 1000000):
if pid not in pids:
print(pid)
missing.add(pid)
tassert(len(missing) == 0, "missing %d pids", len(missing))
for k, v in gstats.items():
print(k, v)
required_fields = set(['name', 'collaborative', 'pid', 'modified_at', 'num_albums', 'num_tracks', 'num_followers',
'num_tracks', 'num_edits', 'duration_ms', 'num_artists', 'tracks'])
optional_fields = set(['description'])
required_track_fields = set(
['pos', 'artist_name', 'artist_uri', 'album_uri', 'album_name', 'track_uri', 'track_name', 'duration_ms'])
def process_playlist(playlist):
tassert(playlist['pid'] not in pids, "duplicate pid %d", playlist['pid'])
pids.add(playlist['pid'])
#unique_playlists.add(playlist['name'])
tassert(len(playlist['name']) > 0, "zero length playlist title")
tassert(len(playlist['tracks']) >= min_tracks_per_playlist, "min tracks per playlist < %d", min_tracks_per_playlist)
for field in playlist:
tassert(field in required_fields or field in optional_fields, "extra field %s", field)
for field in required_fields:
tassert(field in playlist, "missing field %s", field)
tassert(playlist['num_followers'] >= 1, "too few followers %d", playlist['num_followers'])
tassert(playlist['num_edits'] > 0, "too few edits %d", playlist['num_edits'])
#tassert(playlist['modified_at'] <= latest_add_ts, "modified_at too late %d", playlist['modified_at'])
albums = set()
artists = set()
total_duration = 0
for i, track in enumerate(playlist['tracks']):
for field in track:
tassert(field in required_track_fields, "extra track field %s", field)
for field in required_track_fields:
tassert(field in track, "missing track field %s", field)
tassert(i == track['pos'], "out of order %d %d", i, track['pos'])
artists.add(track['artist_uri'])
albums.add(track['album_uri'])
#unique_tracks.add(track['track_uri'])
total_duration += track['duration_ms']
if track['artist_uri'] not in artist_names:
artist_names[track['artist_uri']] = track['artist_name']
tassert(track['artist_name'] == artist_names[track['artist_uri']], 'mismatch artist name %s %s',
track['artist_name'], artist_names[track['artist_uri']])
if track['album_uri'] not in album_names:
album_names[track['album_uri']] = track['album_name']
tassert(track['album_name'] == album_names[track['album_uri']], 'mismatch album name %s %s',
track['album_name'], album_names[track['album_uri']])
if track['track_uri'] not in track_names:
track_names[track['track_uri']] = track['track_name']
tassert(track['track_name'] == track_names[track['track_uri']], 'mismatch track name %s %s',
track['track_name'], track_names[track['track_uri']])
tassert(is_track_uri(track['track_uri']), "invalid track uri %s", track['track_uri'])
tassert(is_album_uri(track['album_uri']), "invalid album uri %s", track['album_uri'])
tassert(is_artist_uri(track['artist_uri']), "invalid artst uri %s", track['artist_uri'])
tassert(len(artists) >= min_artists_per_playlist, 'too few artists %d', len(artists))
tassert(len(albums) >= min_albums_per_playlist, 'too few albums %d', len(albums))
tassert(len(albums) == playlist['num_albums'], 'nalbum mismatch %d %d', len(albums), playlist['num_albums'])
tassert(len(artists) == playlist['num_artists'], 'nartist mismatch %d %d', len(artists), playlist['num_artists'])
tassert(len(playlist['tracks']) == playlist['num_tracks'], 'ntracks mismatch %d %d', len(playlist['tracks']),
playlist['num_tracks'])
tassert(total_duration == playlist['duration_ms'], "mismiatch duration %d %d", total_duration,
playlist['duration_ms'])
required_info_fields = ['generated_on', 'slice', 'version']
def process_info(info):
for field in info:
tassert(field in required_info_fields, "extra info field %s", field)
for field in required_info_fields:
tassert(field in info, "missing info field %s", field)
def is_track_uri(uri):
return uri.startswith("spotify:track:")
def is_album_uri(uri):
return uri.startswith("spotify:album:")
def is_artist_uri(uri):
return uri.startswith("spotify:artist:")
def tassert(condition, fmtstring, *args):
if not condition:
gstats['errors'] += 1
print(fmtstring % args)
if __name__ == '__main__':
path = sys.argv[1]
quick = False
if len(sys.argv) > 2 and sys.argv[2] == '--quick':
quick = True
process_mpd(path)
#uniqueTracks.write(str(unique_tracks))
#uniqueTracks.write('\n')
#uniquePlaylists.write(str(unique_playlists))
#uniquePlaylists.write('\n')
#print (len(unique_tracks))
| 42.099057 | 538 | 0.57916 | 1,009 | 8,925 | 4.915758 | 0.154609 | 0.152419 | 0.220161 | 0.282258 | 0.339113 | 0.230847 | 0.186089 | 0.129839 | 0.083871 | 0.083871 | 0 | 0.007976 | 0.241457 | 8,925 | 211 | 539 | 42.298578 | 0.724668 | 0.174118 | 0 | 0.015267 | 0 | 0.007634 | 0.224733 | 0 | 0 | 0 | 0 | 0 | 0.206107 | 1 | 0.061069 | false | 0 | 0.038168 | 0.022901 | 0.122137 | 0.030534 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
31e7e6cb17da2d0af625a3ee85717a2ef243367c | 13,744 | py | Python | pysnmp/EXTRAHOP-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/EXTRAHOP-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/EXTRAHOP-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module EXTRAHOP-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/EXTRAHOP-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 18:53:00 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ConstraintsIntersection, ValueSizeConstraint, ValueRangeConstraint, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ConstraintsIntersection", "ValueSizeConstraint", "ValueRangeConstraint", "SingleValueConstraint")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
Integer32, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, IpAddress, Counter64, enterprises, MibIdentifier, NotificationType, Gauge32, Unsigned32, TimeTicks, ObjectIdentity, ModuleIdentity, Counter32 = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "IpAddress", "Counter64", "enterprises", "MibIdentifier", "NotificationType", "Gauge32", "Unsigned32", "TimeTicks", "ObjectIdentity", "ModuleIdentity", "Counter32")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
extrahop = ModuleIdentity((1, 3, 6, 1, 4, 1, 32015))
extrahop.setRevisions(('2015-05-08 00:00',))
if mibBuilder.loadTexts: extrahop.setLastUpdated('201505080000Z')
if mibBuilder.loadTexts: extrahop.setOrganization('ExtraHop Networks')
extrahopInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 32015, 0))
extrahopInfoVersionString = MibScalar((1, 3, 6, 1, 4, 1, 32015, 0, 0), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopInfoVersionString.setStatus('current')
extrahopInfoVersionMajor = MibScalar((1, 3, 6, 1, 4, 1, 32015, 0, 1), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopInfoVersionMajor.setStatus('current')
extrahopInfoVersionMinor = MibScalar((1, 3, 6, 1, 4, 1, 32015, 0, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopInfoVersionMinor.setStatus('current')
extrahopInfoVersionBranchRelease = MibScalar((1, 3, 6, 1, 4, 1, 32015, 0, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopInfoVersionBranchRelease.setStatus('current')
extrahopInfoVersionRevision = MibScalar((1, 3, 6, 1, 4, 1, 32015, 0, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopInfoVersionRevision.setStatus('current')
extrahopAlert = MibIdentifier((1, 3, 6, 1, 4, 1, 32015, 1))
extrahopTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 32015, 2))
extrahopObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 32015, 4))
extrahopObjectGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 32015, 4, 1)).setObjects(("EXTRAHOP-MIB", "extrahopAlertName"), ("EXTRAHOP-MIB", "extrahopAlertComment"), ("EXTRAHOP-MIB", "extrahopAlertObjectType"), ("EXTRAHOP-MIB", "extrahopAlertObjectName"), ("EXTRAHOP-MIB", "extrahopAlertExpr"), ("EXTRAHOP-MIB", "extrahopAlertValue"), ("EXTRAHOP-MIB", "extrahopAlertTime"), ("EXTRAHOP-MIB", "extrahopAlertObjectId"), ("EXTRAHOP-MIB", "extrahopAlertObjectStrId"), ("EXTRAHOP-MIB", "extrahopAlertObjectMACAddr"), ("EXTRAHOP-MIB", "extrahopAlertObjectIPAddr"), ("EXTRAHOP-MIB", "extrahopAlertObjectTags"), ("EXTRAHOP-MIB", "extrahopAlertObjectURL"), ("EXTRAHOP-MIB", "extrahopAlertStatName"), ("EXTRAHOP-MIB", "extrahopAlertStatFieldName"), ("EXTRAHOP-MIB", "extrahopAlertSeverity"), ("EXTRAHOP-MIB", "extrahopStatsPktsSinceBoot"), ("EXTRAHOP-MIB", "extrahopStatsBytesSinceBoot"), ("EXTRAHOP-MIB", "extrahopStorageAlertRole"), ("EXTRAHOP-MIB", "extrahopStorageAlertDevice"), ("EXTRAHOP-MIB", "extrahopStorageAlertStatus"), ("EXTRAHOP-MIB", "extrahopStorageAlertDetails"), ("EXTRAHOP-MIB", "extrahopStorageAlertSeverity"), ("EXTRAHOP-MIB", "extrahopStorageAlertMachine"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
extrahopObjectGroup = extrahopObjectGroup.setStatus('current')
extrahopNotificationGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 32015, 4, 2)).setObjects(("EXTRAHOP-MIB", "extrahopAlertTrap"), ("EXTRAHOP-MIB", "extrahopStorageAlertTrap"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
extrahopNotificationGroup = extrahopNotificationGroup.setStatus('current')
extrahopAlertTrap = NotificationType((1, 3, 6, 1, 4, 1, 32015, 2, 1)).setObjects(("EXTRAHOP-MIB", "extrahopAlertName"), ("EXTRAHOP-MIB", "extrahopAlertComment"), ("EXTRAHOP-MIB", "extrahopAlertObjectType"), ("EXTRAHOP-MIB", "extrahopAlertObjectName"), ("EXTRAHOP-MIB", "extrahopAlertExpr"), ("EXTRAHOP-MIB", "extrahopAlertValue"), ("EXTRAHOP-MIB", "extrahopAlertTime"), ("EXTRAHOP-MIB", "extrahopAlertObjectId"), ("EXTRAHOP-MIB", "extrahopAlertObjectStrId"), ("EXTRAHOP-MIB", "extrahopAlertObjectMACAddr"), ("EXTRAHOP-MIB", "extrahopAlertObjectIPAddr"), ("EXTRAHOP-MIB", "extrahopAlertObjectTags"), ("EXTRAHOP-MIB", "extrahopAlertObjectURL"), ("EXTRAHOP-MIB", "extrahopAlertStatName"), ("EXTRAHOP-MIB", "extrahopAlertStatFieldName"), ("EXTRAHOP-MIB", "extrahopAlertSeverity"))
if mibBuilder.loadTexts: extrahopAlertTrap.setStatus('current')
extrahopAlertName = MibScalar((1, 3, 6, 1, 4, 1, 32015, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopAlertName.setStatus('current')
extrahopAlertComment = MibScalar((1, 3, 6, 1, 4, 1, 32015, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopAlertComment.setStatus('current')
extrahopAlertObjectType = MibScalar((1, 3, 6, 1, 4, 1, 32015, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopAlertObjectType.setStatus('current')
extrahopAlertObjectName = MibScalar((1, 3, 6, 1, 4, 1, 32015, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopAlertObjectName.setStatus('current')
extrahopAlertExpr = MibScalar((1, 3, 6, 1, 4, 1, 32015, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopAlertExpr.setStatus('current')
extrahopAlertValue = MibScalar((1, 3, 6, 1, 4, 1, 32015, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopAlertValue.setStatus('current')
extrahopAlertTime = MibScalar((1, 3, 6, 1, 4, 1, 32015, 1, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopAlertTime.setStatus('current')
extrahopAlertObjectId = MibScalar((1, 3, 6, 1, 4, 1, 32015, 1, 8), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopAlertObjectId.setStatus('current')
extrahopAlertObjectStrId = MibScalar((1, 3, 6, 1, 4, 1, 32015, 1, 9), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopAlertObjectStrId.setStatus('current')
extrahopAlertObjectMACAddr = MibScalar((1, 3, 6, 1, 4, 1, 32015, 1, 10), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopAlertObjectMACAddr.setStatus('current')
extrahopAlertObjectIPAddr = MibScalar((1, 3, 6, 1, 4, 1, 32015, 1, 11), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopAlertObjectIPAddr.setStatus('current')
extrahopAlertObjectTags = MibScalar((1, 3, 6, 1, 4, 1, 32015, 1, 12), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopAlertObjectTags.setStatus('current')
extrahopAlertObjectURL = MibScalar((1, 3, 6, 1, 4, 1, 32015, 1, 13), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopAlertObjectURL.setStatus('current')
extrahopAlertStatName = MibScalar((1, 3, 6, 1, 4, 1, 32015, 1, 14), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopAlertStatName.setStatus('current')
extrahopAlertStatFieldName = MibScalar((1, 3, 6, 1, 4, 1, 32015, 1, 15), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopAlertStatFieldName.setStatus('current')
extrahopAlertSeverity = MibScalar((1, 3, 6, 1, 4, 1, 32015, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("emergency", 0), ("alert", 1), ("critical", 2), ("error", 3), ("warning", 4), ("notice", 5), ("info", 6), ("debug", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopAlertSeverity.setStatus('current')
extrahopStats = MibIdentifier((1, 3, 6, 1, 4, 1, 32015, 3))
extrahopStatsPktsSinceBoot = MibScalar((1, 3, 6, 1, 4, 1, 32015, 3, 1), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopStatsPktsSinceBoot.setStatus('current')
extrahopStatsBytesSinceBoot = MibScalar((1, 3, 6, 1, 4, 1, 32015, 3, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopStatsBytesSinceBoot.setStatus('current')
extrahopStorageAlert = MibIdentifier((1, 3, 6, 1, 4, 1, 32015, 5))
extrahopStorageAlertTrap = NotificationType((1, 3, 6, 1, 4, 1, 32015, 2, 2)).setObjects(("EXTRAHOP-MIB", "extrahopStorageAlertRole"), ("EXTRAHOP-MIB", "extrahopStorageAlertDevice"), ("EXTRAHOP-MIB", "extrahopStorageAlertStatus"), ("EXTRAHOP-MIB", "extrahopStorageAlertDetails"), ("EXTRAHOP-MIB", "extrahopStorageAlertSeverity"), ("EXTRAHOP-MIB", "extrahopStorageAlertMachine"))
if mibBuilder.loadTexts: extrahopStorageAlertTrap.setStatus('current')
extrahopStorageAlertRole = MibScalar((1, 3, 6, 1, 4, 1, 32015, 5, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopStorageAlertRole.setStatus('current')
extrahopStorageAlertDevice = MibScalar((1, 3, 6, 1, 4, 1, 32015, 5, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopStorageAlertDevice.setStatus('current')
extrahopStorageAlertStatus = MibScalar((1, 3, 6, 1, 4, 1, 32015, 5, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopStorageAlertStatus.setStatus('current')
extrahopStorageAlertDetails = MibScalar((1, 3, 6, 1, 4, 1, 32015, 5, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopStorageAlertDetails.setStatus('current')
extrahopStorageAlertSeverity = MibScalar((1, 3, 6, 1, 4, 1, 32015, 5, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("emergency", 0), ("alert", 1), ("critical", 2), ("error", 3), ("warning", 4), ("notice", 5), ("info", 6), ("debug", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopStorageAlertSeverity.setStatus('current')
extrahopStorageAlertMachine = MibScalar((1, 3, 6, 1, 4, 1, 32015, 5, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: extrahopStorageAlertMachine.setStatus('current')
mibBuilder.exportSymbols("EXTRAHOP-MIB", extrahop=extrahop, extrahopAlertComment=extrahopAlertComment, extrahopAlertExpr=extrahopAlertExpr, extrahopStorageAlertDevice=extrahopStorageAlertDevice, extrahopStatsBytesSinceBoot=extrahopStatsBytesSinceBoot, extrahopAlertObjectTags=extrahopAlertObjectTags, extrahopAlert=extrahopAlert, extrahopStorageAlertStatus=extrahopStorageAlertStatus, extrahopAlertStatFieldName=extrahopAlertStatFieldName, extrahopStorageAlertMachine=extrahopStorageAlertMachine, extrahopStorageAlertTrap=extrahopStorageAlertTrap, extrahopAlertStatName=extrahopAlertStatName, extrahopStats=extrahopStats, extrahopStorageAlertRole=extrahopStorageAlertRole, extrahopStorageAlertDetails=extrahopStorageAlertDetails, extrahopObjectGroup=extrahopObjectGroup, extrahopInfoVersionRevision=extrahopInfoVersionRevision, extrahopAlertSeverity=extrahopAlertSeverity, extrahopAlertObjectStrId=extrahopAlertObjectStrId, extrahopTraps=extrahopTraps, extrahopStatsPktsSinceBoot=extrahopStatsPktsSinceBoot, PYSNMP_MODULE_ID=extrahop, extrahopAlertTime=extrahopAlertTime, extrahopInfoVersionString=extrahopInfoVersionString, extrahopObjects=extrahopObjects, extrahopAlertObjectName=extrahopAlertObjectName, extrahopAlertObjectURL=extrahopAlertObjectURL, extrahopNotificationGroup=extrahopNotificationGroup, extrahopAlertObjectId=extrahopAlertObjectId, extrahopAlertObjectIPAddr=extrahopAlertObjectIPAddr, extrahopInfoVersionMinor=extrahopInfoVersionMinor, extrahopStorageAlert=extrahopStorageAlert, extrahopAlertValue=extrahopAlertValue, extrahopAlertTrap=extrahopAlertTrap, extrahopInfo=extrahopInfo, extrahopInfoVersionMajor=extrahopInfoVersionMajor, extrahopAlertObjectType=extrahopAlertObjectType, extrahopStorageAlertSeverity=extrahopStorageAlertSeverity, extrahopAlertName=extrahopAlertName, extrahopInfoVersionBranchRelease=extrahopInfoVersionBranchRelease, extrahopAlertObjectMACAddr=extrahopAlertObjectMACAddr)
| 147.784946 | 1,919 | 0.784197 | 1,306 | 13,744 | 8.251149 | 0.128637 | 0.05206 | 0.011136 | 0.014848 | 0.527561 | 0.509187 | 0.499907 | 0.479955 | 0.463808 | 0.402097 | 0 | 0.058934 | 0.066647 | 13,744 | 92 | 1,920 | 149.391304 | 0.781104 | 0.023137 | 0 | 0.023529 | 0 | 0 | 0.20638 | 0.070582 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.070588 | 0 | 0.070588 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
9ee6451046bc3f865d22e614e1af6338b16ed275 | 4,175 | py | Python | swagger_server/models/impedance.py | garagonc/simulation-engine | c129f0bf601e0d56d924c9e5fa2cf94f7e31a356 | [
"Apache-2.0"
] | 3 | 2019-06-24T09:02:21.000Z | 2020-01-30T10:37:46.000Z | swagger_server/models/impedance.py | linksmart/simulation-engine | c129f0bf601e0d56d924c9e5fa2cf94f7e31a356 | [
"Apache-2.0"
] | null | null | null | swagger_server/models/impedance.py | linksmart/simulation-engine | c129f0bf601e0d56d924c9e5fa2cf94f7e31a356 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from swagger_server.models.base_model_ import Model
from swagger_server import util
class Impedance(Model):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """

    def __init__(self, r1: float=None, r0: float=None, x1: float=None, x0: float=None, c1: float=None, c0: float=None):  # noqa: E501
        """Impedance - a model defined in Swagger

        :param r1: The r1 of this Impedance.
        :type r1: float
        :param r0: The r0 of this Impedance.
        :type r0: float
        :param x1: The x1 of this Impedance.
        :type x1: float
        :param x0: The x0 of this Impedance.
        :type x0: float
        :param c1: The c1 of this Impedance.
        :type c1: float
        :param c0: The c0 of this Impedance.
        :type c0: float
        """
        # Swagger metadata: attribute -> python type, attribute -> wire (JSON) name.
        self.swagger_types = {
            'r1': float, 'r0': float,
            'x1': float, 'x0': float,
            'c1': float, 'c0': float,
        }
        self.attribute_map = {
            'r1': 'R1', 'r0': 'R0',
            'x1': 'X1', 'x0': 'X0',
            'c1': 'C1', 'c0': 'C0',
        }
        self._r1 = r1
        self._r0 = r0
        self._x1 = x1
        self._x0 = x0
        self._c1 = c1
        self._c0 = c0

    @classmethod
    def from_dict(cls, dikt) -> 'Impedance':
        """Returns the dict as a model

        :param dikt: A dict.
        :type: dict
        :return: The Impedance of this Impedance.
        :rtype: Impedance
        """
        return util.deserialize_model(dikt, cls)


def _impedance_accessor(name):
    """Build a property that proxies the matching private attribute of Impedance."""
    slot = '_' + name

    def _get(self) -> float:
        return getattr(self, slot)

    def _set(self, value: float):
        setattr(self, slot, value)

    return property(_get, _set, doc="The %s of this Impedance." % name)


# All six fields are plain pass-through properties; attach them in one sweep
# instead of spelling out six identical getter/setter pairs.
for _name in ('r1', 'r0', 'x1', 'x0', 'c1', 'c0'):
    setattr(Impedance, _name, _impedance_accessor(_name))
del _name
| 21.410256 | 133 | 0.516168 | 528 | 4,175 | 4.015152 | 0.130682 | 0.087736 | 0.21934 | 0.062736 | 0.441038 | 0.285849 | 0.251887 | 0 | 0 | 0 | 0 | 0.062716 | 0.377485 | 4,175 | 194 | 134 | 21.520619 | 0.752982 | 0.416766 | 0 | 0.264706 | 0 | 0 | 0.023672 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.205882 | false | 0 | 0.073529 | 0 | 0.397059 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
9ef207ec7b085b352bf292f40a7895851ed387e0 | 454 | py | Python | demo/demoproject/apache/urls.py | timgates42/django-downloadview | cd37fd5084937abf13cabe73cf63623cc09289b7 | [
"BSD-3-Clause"
] | null | null | null | demo/demoproject/apache/urls.py | timgates42/django-downloadview | cd37fd5084937abf13cabe73cf63623cc09289b7 | [
"BSD-3-Clause"
] | null | null | null | demo/demoproject/apache/urls.py | timgates42/django-downloadview | cd37fd5084937abf13cabe73cf63623cc09289b7 | [
"BSD-3-Clause"
] | null | null | null | """URL mapping."""
from django.conf.urls import url
from demoproject.apache import views
from demoproject.compat import patterns
# Route table: (url slug, view callable, url name). The slug doubles as the
# regex body, so both routes share one construction expression below.
_ROUTES = (
    ("optimized-by-middleware", views.optimized_by_middleware, "optimized_by_middleware"),
    ("optimized-by-decorator", views.optimized_by_decorator, "optimized_by_decorator"),
)

urlpatterns = patterns(
    "demoproject.apache.views",
    *[url(r"^%s/$" % slug, view, name=name) for slug, view, name in _ROUTES]
)
| 22.7 | 39 | 0.665198 | 49 | 454 | 6 | 0.387755 | 0.22449 | 0.214286 | 0.102041 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.218062 | 454 | 19 | 40 | 23.894737 | 0.828169 | 0.026432 | 0 | 0.25 | 0 | 0 | 0.275229 | 0.275229 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.1875 | 0 | 0.1875 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
9efeaec8a8f55934043b5c8217b12bfc5e7007cb | 183 | py | Python | principal/urls.py | ProjetoHavoc/Prometheus | 244d0cd05ad11844640cac860975eb177197f2db | [
"MIT"
] | null | null | null | principal/urls.py | ProjetoHavoc/Prometheus | 244d0cd05ad11844640cac860975eb177197f2db | [
"MIT"
] | null | null | null | principal/urls.py | ProjetoHavoc/Prometheus | 244d0cd05ad11844640cac860975eb177197f2db | [
"MIT"
] | null | null | null | from django.urls import path
from .views import PrincipalIndex, Sobre
urlpatterns = [
path('', PrincipalIndex, name='index'),
path('sobre/', Sobre, name='sobre'),
] | 15.25 | 43 | 0.650273 | 20 | 183 | 5.95 | 0.55 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.202186 | 183 | 12 | 44 | 15.25 | 0.815068 | 0 | 0 | 0 | 0 | 0 | 0.086957 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
730b7104d0640ceac5785036383927c143768bee | 3,593 | py | Python | descale.py | DJATOM/vapoursynth-descale | 2d2675a5b6ee48eb1f3431eb935f5c3f1f606b9c | [
"WTFPL"
] | 1 | 2020-06-01T10:11:51.000Z | 2020-06-01T10:11:51.000Z | descale.py | DJATOM/vapoursynth-descale | 2d2675a5b6ee48eb1f3431eb935f5c3f1f606b9c | [
"WTFPL"
] | null | null | null | descale.py | DJATOM/vapoursynth-descale | 2d2675a5b6ee48eb1f3431eb935f5c3f1f606b9c | [
"WTFPL"
] | null | null | null | from vapoursynth import core, GRAYS, RGBS, GRAY, YUV, RGB # You need Vapoursynth R37 or newer
from functools import partial
# If yuv444 is True chroma will be upscaled instead of downscaled
# If gray is True the output will be grayscale
def _forward_descale(kernel, src, width, height, b=None, c=None, taps=None,
                     yuv444=False, gray=False, chromaloc=None, opt=None):
    """Forward to Descale with a fixed kernel name (shared by all wrappers)."""
    return Descale(src, width, height, kernel=kernel, b=b, c=c, taps=taps,
                   yuv444=yuv444, gray=gray, chromaloc=chromaloc, opt=opt)


def Debilinear(src, width, height, yuv444=False, gray=False, chromaloc=None, opt=None):
    """Descale with a bilinear kernel."""
    return _forward_descale('bilinear', src, width, height,
                            yuv444=yuv444, gray=gray, chromaloc=chromaloc, opt=opt)


def Debicubic(src, width, height, b=0.0, c=0.5, yuv444=False, gray=False, chromaloc=None, opt=None):
    """Descale with a bicubic kernel parameterized by b and c."""
    return _forward_descale('bicubic', src, width, height, b=b, c=c,
                            yuv444=yuv444, gray=gray, chromaloc=chromaloc, opt=opt)


def Delanczos(src, width, height, taps=3, yuv444=False, gray=False, chromaloc=None, opt=None):
    """Descale with a lanczos kernel of the given tap count."""
    return _forward_descale('lanczos', src, width, height, taps=taps,
                            yuv444=yuv444, gray=gray, chromaloc=chromaloc, opt=opt)


def Despline16(src, width, height, yuv444=False, gray=False, chromaloc=None, opt=None):
    """Descale with a spline16 kernel."""
    return _forward_descale('spline16', src, width, height,
                            yuv444=yuv444, gray=gray, chromaloc=chromaloc, opt=opt)


def Despline36(src, width, height, yuv444=False, gray=False, chromaloc=None, opt=None):
    """Descale with a spline36 kernel."""
    return _forward_descale('spline36', src, width, height,
                            yuv444=yuv444, gray=gray, chromaloc=chromaloc, opt=opt)


def Despline64(src, width, height, yuv444=False, gray=False, chromaloc=None, opt=None):
    """Descale with a spline64 kernel."""
    return _forward_descale('spline64', src, width, height,
                            yuv444=yuv444, gray=gray, chromaloc=chromaloc, opt=opt)
def Descale(src, width, height, kernel='bilinear', b=0.0, c=0.5, taps=3, yuv444=False, gray=False, chromaloc=None, opt=None):
    """Reverse a previous upscale of *src* to width x height with the given kernel.

    RGB input (unless gray=True) is descaled as RGBS; otherwise only luma is
    descaled and chroma is rescaled separately with Spline36.
    b/c apply to 'bicubic', taps to 'lanczos'; the other kernels ignore them.
    """
    src_f = src.format
    src_cf = src_f.color_family
    src_st = src_f.sample_type
    src_bits = src_f.bits_per_sample
    src_sw = src_f.subsampling_w
    src_sh = src_f.subsampling_h
    descale_filter = get_filter(b, c, taps, kernel)
    if src_cf == RGB and not gray:
        # Descale all three planes in float RGB, then restore the input format.
        rgb = descale_filter(to_rgbs(src), width, height, opt=opt)
        return rgb.resize.Point(format=src_f.id)
    # Luma path: descale in float gray, then back to the source bit depth.
    y = descale_filter(to_grays(src), width, height, opt=opt)
    y_f = core.register_format(GRAY, src_st, src_bits, 0, 0)
    y = y.resize.Point(format=y_f.id)
    if src_cf == GRAY or gray:
        return y
    # An odd output dimension cannot carry subsampled chroma (see module header).
    if not yuv444 and ((width % 2 and src_sw) or (height % 2 and src_sh)):
        raise ValueError('Descale: The output dimension and the subsampling are incompatible.')
    # Chroma is resized (up for yuv444, else down) rather than descaled.
    uv_f = core.register_format(src_cf, src_st, src_bits, 0 if yuv444 else src_sw, 0 if yuv444 else src_sh)
    uv = src.resize.Spline36(width, height, format=uv_f.id, chromaloc_s=chromaloc)
    return core.std.ShufflePlanes([y,uv], [0,1,2], YUV)
# Helpers
def to_grays(src):
    """Convert *src* to the GRAYS (float gray) format via a Point resize."""
    converted = src.resize.Point(format=GRAYS)
    return converted


def to_rgbs(src):
    """Convert *src* to the RGBS (float RGB) format via a Point resize."""
    converted = src.resize.Point(format=RGBS)
    return converted


def get_plane(src, plane):
    """Extract one plane of *src* as a GRAY clip."""
    return core.std.ShufflePlanes(src, plane, GRAY)
def get_filter(b, c, taps, kernel):
    """Resolve a (case-insensitive) kernel name to the matching descale callable.

    'bicubic' binds b/c and 'lanczos' binds taps via partial; the remaining
    kernels take no extra parameters. Raises ValueError for unknown names.
    """
    kernel = kernel.lower()
    if kernel == 'bicubic':
        return partial(core.descale.Debicubic, b=b, c=c)
    if kernel == 'lanczos':
        return partial(core.descale.Delanczos, taps=taps)
    # Parameterless kernels: look the plugin function up by name on demand.
    plain = {
        'bilinear': 'Debilinear',
        'spline16': 'Despline16',
        'spline36': 'Despline36',
        'spline64': 'Despline64',
    }
    name = plain.get(kernel)
    if name is None:
        raise ValueError('Descale: Invalid kernel specified.')
    return getattr(core.descale, name)
| 41.77907 | 140 | 0.704147 | 552 | 3,593 | 4.5 | 0.184783 | 0.070853 | 0.084541 | 0.056361 | 0.47182 | 0.427134 | 0.382448 | 0.364734 | 0.364734 | 0.345813 | 0 | 0.038835 | 0.168661 | 3,593 | 85 | 141 | 42.270588 | 0.792769 | 0.041748 | 0 | 0 | 0 | 0 | 0.058464 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.192982 | false | 0 | 0.035088 | 0.157895 | 0.54386 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 2 |
730da005c9636cfc340356f2a389e631c159b1a8 | 286 | py | Python | tools/forensics.py | LucaRibeiro/pentestools | 2e7a6b9bf51a84aec90944c50a23e882d184ccdc | [
"MIT"
] | 1 | 2021-02-18T16:15:25.000Z | 2021-02-18T16:15:25.000Z | tools/forensics.py | LucaRibeiro/Pentools | 2e7a6b9bf51a84aec90944c50a23e882d184ccdc | [
"MIT"
] | null | null | null | tools/forensics.py | LucaRibeiro/Pentools | 2e7a6b9bf51a84aec90944c50a23e882d184ccdc | [
"MIT"
] | null | null | null | #!/usr/bin/python3
# Names of the forensics tools bundled in this category.
# NOTE(review): the name `list` shadows the builtin `list`; renaming it (e.g.
# to FORENSICS_TOOLS) would be safer, but may break importers of this module —
# confirm before changing.
list = ["Binwalk","bulk-extractor","Capstone","chntpw","Cuckoo",
        "dc3dd","ddrescue","DFF","diStorm3","Dumpzilla","extundelete",
        "Foremost","Galleta","Guymager","iPhone Backup Analyzer","p0f",
        "pdf-parser","pdfid","pdgmail","peepdf","RegRipper","Volatility","Xplico"]
73134c2beac60a875efa798ea2a7275583a19d03 | 19,453 | py | Python | LM/bin/SELSEmailUtil.py | ncsa/sels | 0b9adb7c9db965a08e87baecf08385c68bb0f620 | [
"NCSA"
] | null | null | null | LM/bin/SELSEmailUtil.py | ncsa/sels | 0b9adb7c9db965a08e87baecf08385c68bb0f620 | [
"NCSA"
] | null | null | null | LM/bin/SELSEmailUtil.py | ncsa/sels | 0b9adb7c9db965a08e87baecf08385c68bb0f620 | [
"NCSA"
] | null | null | null | #!/usr/bin/python
# Created by: SELS Team
#
# Description: Email Utilities used by SELSModerator.py
#
# License: This code is a part of SELS distribution under NCSA/UIUC Open Source License (refer NCSA-license.txt)
############################################################################################################################
import os
import sys
import string
import smtplib
import socket
import getopt
import getpass
#import threading
import time
from SELSLMUtil import *
from SELSLMConfig import * # configuration file
def sendMail( msg, list, user):
    """Send *msg* via SMTP from the list's LM address.

    :param msg: complete RFC-822 message text (headers included)
    :param list: mailing-list name; its list.conf supplies SMTP settings
    :param user: recipient address, or None to address the list itself
    Exits the process on any SMTP/socket/IO failure.
    """
    listSELSPath = LM_PATH + '/lists/' + list
    # read list config file
    LMConfig = {}
    listConfigFile = listSELSPath + '/list.conf'
    readLMConfig( listConfigFile, LMConfig )
    LM_ID = LMConfig['LM_ID']
    LM_SIG_ID = LMConfig['LM_SIG_ID']
    LM_PASS = LMConfig['LM_PASS']
    LM_SIG_PASS = LMConfig['LM_SIG_PASS']
    LM_EMAIL = LMConfig['LM_EMAIL']
    LS_ID = LMConfig['LS_ID']
    LS_EMAIL = LMConfig['LS_EMAIL']
    SMTPDomain = LMConfig['SMTPDomain']
    MySMTPServer = LMConfig['MySMTPServer']
    MySMTPPort = LMConfig['MySMTPPort']
    fromaddr = ("From: %s\n"%(LM_EMAIL))
    # user == None means "send to the list address" (the LS side).
    if user == None:
        toaddr = (["To: %s@%s\n"%(list, SMTPDomain )])
    else:
        toaddr = (["To: %s\n"%(user)])
    try:
        server = smtplib.SMTP(MySMTPServer, int(MySMTPPort))
        refused = server.sendmail( fromaddr,
        toaddr, msg)
    except smtplib.SMTPRecipientsRefused, e:
        print ('SMTP ReceipientRefused exception: ' + str(e))
        print 'Program will exit now !'
        sys.exit()
    except smtplib.SMTPResponseException, e:
        print ('SMTP Session failure: %s, %s', e.smtp_code, e.smtp_error)
        print 'Program will exit now !'
        sys.exit()
    except socket.error, e:
        print( 'socket error:' + str(e) )
        print 'Program will exit now !'
        sys.exit()
    except IOError, e:
        print( 'IOError:' + str(e))
        print 'Program will exit now!'
        sys.exit()
    except smtplib.SMTPException,e:
        print( 'SMTPException:' + str(e))
        print 'Program will exit now!'
        sys.exit()
def buildAcceptMsg(list, user, pubkey, seckey, LMpubkey, LKpubkey, LSadminkey, LKrevcert):
    """Assemble the plaintext Accept notice sent to a new subscriber.

    NOTE: LMpubkey is accepted for call compatibility but is not included in
    the body (its line was commented out upstream).
    """
    template = ("list: %s\n"
                "List subscriber: %s\n"
                "Revocation Certificate for previous List Key(LK) if any:\n%s\n"
                "List subscriber encryption (public) key:\n%s\n"
                "List subscriber decryption (private) key:\n%s\n"
                "List encryption (public) key:\n%s\n"
                "List Server Administrator's signature verification (public) key:\n%s\n")
    return template % (list, user, LKrevcert, pubkey, seckey, LKpubkey, LSadminkey)
def buildUpdateMsg(list):
    """Assemble the body of an Update command for the given list."""
    return "cmd: Update\nlist: %s\n" % (list,)
def buildJoinMsg(list, user, pubkey, rand, email):
    """Assemble the body of a Join command for a subscriber's public key."""
    template = ("cmd: Join\n"
                "list: %s\n"
                "user: %s\n"
                "pubkey:\n%s\n"
                "random: %s\n"
                "LMEmail: %s\n")
    return template % (list, user, pubkey, rand, email)
def buildUnsubscribeMsg(list, user):
    """Assemble the body of an Unsubscribe command for one subscriber."""
    return "cmd: Unsubscribe\nlist: %s\nuser: %s\n" % (list, user)
def buildCreateListMsg(list, g, p, q, email):
    """Assemble the body of a Create command carrying the El-Gamal parameters."""
    template = ("cmd: Create\n"
                "list: %s\n"
                "g: %s\n"
                "p: %s\n"
                "q: %s\n"
                "LMEmail: %s\n")
    return template % (list, g, p, q, email)
def buildLKPubKeyMsg(list, LKkeyid, LKprimkeyid, email):
    """Assemble the body of an LKpubkey command naming the list-key IDs."""
    template = ("cmd: LKpubkey\n"
                "list: %s\n"
                "LKkeyid: %s\n"
                "LKprimkeyid: %s\n"
                "LMEmail: %s\n")
    return template % (list, LKkeyid, LKprimkeyid, email)
def signMsg(msg, list, user, passphrase):
    """Clear-sign *msg* with the key of *user* using gnupg, via temp files.

    Writes msg to <list dir>/msg, runs gpg --sign, and returns the signed
    text read back from msg.asc. Returns the raw gpg stderr if the
    passphrase was rejected (callers test for the substring "passphrase"),
    or None if signing produced no output.
    """
    listSELSPath = LM_PATH + '/lists/' + list
    gnupg.passphrase = passphrase
    inFile = os.path.normpath("%s/msg"%(listSELSPath))
    outFile = os.path.normpath("%s/msg.asc"%(listSELSPath))
    try:
        fp = open( inFile, "w")
        fp.write(msg)
    except IOError, (errno, strerror):
        print "I/O error(%s): %s" % (errno, strerror)
        sys.exit()
    else:
        fp.close()
    # Best-effort removal of a stale output file; ignore if absent.
    try:
        os.remove(outFile)
    except:
        None
    params = ['--always-trust','--batch']
    params.append('-u')
    params.append('"%s"'%(user))
    params.append('--sign')
    params.append( '"%s"'%(inFile))
    signed = None
    try:
        out, err = gnupg.run(params)
        print err
        # gpg reports problems on stderr; sniff for the two known failures.
        if err.find("passphrase") <> -1:
            return err
        elif err.find("available") <> -1:
            print 'Signature Key not found in keyring. Either run option --createLMkey again or import the key manually into %s keyring'%(list)
            print ''
            usage()
        else:
            try:
                fp = open(outFile, "r")
                signed = fp.read()
            except IOError, (errno, strerror):
                print "I/O error(%s): %s" % (errno, strerror)
                sys.exit()
            else:
                fp.close()
    except IOError:
        print "Error signing message: %s"%(err)
    # Clean up both temp files; ignore whatever is already gone.
    try:
        os.remove(inFile)
        os.remove(outFile)
    except:
        None
    return signed
def encMsg(msg, list, user):
    """Encrypt *msg* to the public key of *user* with gnupg, via temp files.

    Writes msg to <list dir>/msg, runs gpg -e -r user, and returns the
    ASCII-armored ciphertext read from msg.asc. Uses the module-level
    LM_PASS as the gnupg passphrase.
    """
    listSELSPath = LM_PATH + '/lists/' + list
    gnupg.passphrase = LM_PASS
    inFile = os.path.normpath("%s/msg"%(listSELSPath))
    outFile = os.path.normpath("%s/msg.asc"%(listSELSPath))
    try:
        fp = open( inFile, "w")
        fp.write(msg)
    except IOError, (errno, strerror):
        print "I/O error(%s): %s" % (errno, strerror)
        sys.exit()
    else:
        fp.close()
    # Best-effort removal of a stale output file; ignore if absent.
    try:
        os.remove(outFile)
    except:
        None
    params = ['--always-trust','--batch', '-r', '"%s"'%(user), '-e']
    params.append( '"%s"'%(inFile))
    signed = None
    out, err = gnupg.run(params)
    try:
        fp = open(outFile, "r")
        signed = fp.read()
    except IOError, (errno, strerror):
        print "I/O error(%s): %s" % (errno, strerror)
        sys.exit()
    else:
        fp.close()
    # Clean up both temp files; ignore whatever is already gone.
    try:
        os.remove(inFile)
        os.remove(outFile)
    except:
        None
    return signed
def encPassMsg(msg, list, passphrase):
    """Symmetrically encrypt *msg* (gpg -c, CAST5) under *passphrase*.

    Writes msg to <list dir>/msg and returns the armored ciphertext read
    back from msg.asc, or None if encryption produced no output.
    """
    listSELSPath = LM_PATH + '/lists/' + list
    inFile = os.path.normpath("%s/msg"%(listSELSPath))
    outFile = os.path.normpath("%s/msg.asc"%(listSELSPath))
    try:
        fp = open( inFile, "w")
        fp.write(msg)
    except IOError, (errno, strerror):
        print "I/O error(%s): %s" % (errno, strerror)
        sys.exit()
    else:
        fp.close()
    # Best-effort removal of a stale output file; ignore if absent.
    try:
        os.remove( outFile )
    except:
        None
    gnupg.passphrase = passphrase
    params = ['--always-trust','--batch', "--cipher-algo CAST5" , '-c', '%s'%(inFile)]
    encrypted = None
    try:
        out, err = gnupg.run(params)
        try:
            fp = open(outFile, "r")
            encrypted = fp.read()
        except IOError, (errno, strerror):
            print "I/O error(%s): %s" % (errno, strerror)
            sys.exit()
        else:
            fp.close()
    except IOError:
        print "Error encrypting message: %s"%(err)
    # Clean up both temp files; ignore whatever is already gone.
    try:
        os.remove(inFile)
        os.remove(outFile)
    except:
        None
    return encrypted
def createList( list, paramfile, config, debugflag ):
    """Send a signed Create command for a new list, then notify the LS admin.

    :param list: mailing-list name
    :param paramfile: file containing 'g:', 'p:', 'q:' El-Gamal parameters
    :param config: pre-loaded list config dict, or None to read list.conf
    :param debugflag: truthy prints the outgoing messages
    """
    global LM_ID, LM_SIG_ID, LM_PASS, LM_EMAIL, LS_ID, LS_EMAIL, SMTPDomain, MySMTPServer, MySMTPPort
    listSELSPath = LM_PATH + '/lists/' + list
    gnupg.options.homedir = listSELSPath
    LMConfig = {}
    if config == None:
        listConfigFile = listSELSPath + '/list.conf'
        readLMConfig( listConfigFile, LMConfig )
    else:
        LMConfig = config
    LM_ID = LMConfig['LM_ID']
    LM_SIG_ID = LMConfig['LM_SIG_ID']
    LM_PASS = LMConfig['LM_PASS']
    LM_SIG_PASS = LMConfig['LM_SIG_PASS']
    LM_EMAIL = LMConfig['LM_EMAIL']
    LS_ID = LMConfig['LS_ID']
    LS_EMAIL = LMConfig['LS_EMAIL']
    SMTPDomain = LMConfig['SMTPDomain']
    MySMTPServer = LMConfig['MySMTPServer']
    MySMTPPort = LMConfig['MySMTPPort']
    # read El-Gamal parameters, p,g,q
    paramfile = os.path.normpath(paramfile)
    fp = open(paramfile, 'r')
    params = fp.read()
    fp.close()
    params = params.splitlines()
    g = ''
    p = ''
    q = ''
    # Lines look like "g: <value>"; the second token is the parameter value.
    for line in params:
        line = line.strip()
        wlist = string.split(line)
        if wlist[0] == 'g:':
            g = wlist[1]
        elif wlist[0] == 'p:':
            p = wlist[1]
        elif wlist[0] == 'q:':
            q = wlist[1]
    message = buildCreateListMsg(list, g, p, q, LM_EMAIL)
    # sign a message
    msg = signMsg(message, list, LM_SIG_ID, LM_SIG_PASS )
    # signMsg returns gpg stderr on a bad passphrase; re-prompt until it signs.
    while msg.find("passphrase") <> -1:
        print 'Try again!'
        pass1 = getpass.getpass("Enter passphrase for LM signing key: ")
        LMConfig["LM_SIG_PASS"]=pass1
        LM_SIG_PASS = pass1
        msg = signMsg(message, list, LM_SIG_ID, LM_SIG_PASS )
    header = "From: %s\n"%(LM_EMAIL)
    header += "To: %s@%s\n"%(list, SMTPDomain )
    header += "Subject: Create %s\n\n"%(list)
    msg = header + msg + '\n'
    if debugflag:
        print msg
    # Signed Create command goes to the list address on the LS.
    sendMail( msg, list, None)
    # Plain courtesy notification to the LS administrator.
    headerls = "From: %s\n"%(LM_EMAIL)
    headerls += "To: %s\n"%(LS_EMAIL)
    headerls += "Subject: List %s created by %s"%(list, LM_EMAIL)
    msgls = headerls + '\n'
    if debugflag:
        print msgls
    sendMail(msgls, list, LS_EMAIL)
def accept( list, user, userpass, pubfile, secfile, LMpubfile, LSadminpubfile, LKpubfile, LKrevcert,config, debugflag ):
    """Send the three-step subscriber onboarding emails for *user*.

    Step1: instructions; Step2: the LM public key; Step3: the signed Accept
    message (keys + revocation cert) symmetrically encrypted under *userpass*.
    :param pubfile/secfile: subscriber key-pair files to read
    :param LMpubfile/LSadminpubfile/LKpubfile: key files to include
    :param LKrevcert: revocation certificate text for a previous list key
    :param config: pre-loaded config dict, or None to read list.conf
    Returns False on a key-file read error.
    """
    global LM_ID, LM_SIG_ID, LM_PASS, LM_EMAIL, LS_ID, LS_EMAIL, SMTPDomain, MySMTPServer, MySMTPPort
    listSELSPath = LM_PATH + '/lists/' + list
    instrFilePath = LM_PATH+ '/bin/' + 'instructions.txt'
    gnupg.options.homedir = listSELSPath
    LMConfig = {}
    if config == None:
        listConfigFile = listSELSPath + '/list.conf'
        readLMConfig( listConfigFile, LMConfig )
    else:
        LMConfig = config
    LM_ID = LMConfig['LM_ID']
    LM_SIG_ID = LMConfig['LM_SIG_ID']
    LM_PASS = LMConfig['LM_PASS']
    LM_SIG_PASS = LMConfig['LM_SIG_PASS']
    LM_EMAIL = LMConfig['LM_EMAIL']
    LS_ID = LMConfig['LS_ID']
    LS_EMAIL = LMConfig['LS_EMAIL']
    SMTPDomain = LMConfig['SMTPDomain']
    MySMTPServer = LMConfig['MySMTPServer']
    MySMTPPort = LMConfig['MySMTPPort']
    try:
        # read public key of user
        fp = open(os.path.normpath(pubfile), 'r')
        pubkey = fp.read()
        fp.close()
        # read random number from a file for key generation
        fp = open(os.path.normpath(secfile), 'r')
        seckey = fp.read()
        fp.close()
        fp = open(os.path.normpath(LMpubfile), 'r')
        LMpubkey = fp.read()
        fp.close()
        fp = open(os.path.normpath(LSadminpubfile), 'r')
        LSadminkey = fp.read()
        fp.close()
        fp = open(os.path.normpath(LKpubfile), 'r')
        LKpubkey = fp.read()
        fp.close()
    except IOError, (errno, strerror):
        print "I/O error(%s): %s" % (errno, strerror)
        return False
        sys.exit()  # NOTE(review): unreachable — the return above always fires first.
    # Instruction email
    try:
        fi = open(instrFilePath, 'r')
        instrbody = fi.read()
    except IOError:
        print 'instructions.txt is missing ! Download code again!'
        sys.exit()
    else:
        fi.close()
    header = "From: %s\n"%(LM_EMAIL)
    header += "To: %s\n"%(user)
    header += "Subject: Step1: Instructions for setting up keys for %s subscribed to %s\n"%(user, list)
    line1 = "You will receive two emails from the List Moderator %s. Please follow the instructions described here.\n"%(LM_EMAIL)
    msg = header + line1 + '\n' + instrbody + '\n'
    if debugflag:
        print msg
    # send to user
    sendMail(msg, list, user)
    # Small pauses keep the three steps arriving in order.
    time.sleep(1)
    # LM pub email
    line1 = "Import the LM %s public key and place trust in it. "%(LM_EMAIL)
    line2 = "To do so refer to the email sent by %s with subject "%(LM_EMAIL)
    line3 = "\"Instructions for setting up keys for %s subscribed to %s\"\n"%(user, list)
    msg = LMpubkey
    header = "From: %s\n"%(LM_EMAIL)
    header += "To: %s\n"%(user)
    header += "Subject: Step2: LM public key %s %s\n\n"%(list, user)
    msg = header + line1 + line2 + line3 +'\n' + msg + '\n'
    if debugflag:
        print msg
    # send to user
    sendMail(msg, list, user)
    time.sleep(2)
    # Accept message email
    # build a message
    message = buildAcceptMsg( list, user, pubkey, seckey, LMpubkey, LKpubkey, LSadminkey, LKrevcert)
    # sign a message
    LK_ID = "LK (%s) <%s@%s>"%(list,list,SMTPDomain)
    line4 = "This email contains the list server admin's %s public key, list public key, %s, a revocation certificate, if any, for the previous list public key, and an encryption/decryption key-pair for %s.\n"%(LS_EMAIL, LK_ID, user)
    line5 = "To decrypt this email, you require a passphrase given to you by the List Moderator, %s.\n"%(LM_EMAIL)
    line6 = "To do so refer to the email sent by %s with subject "%(LM_EMAIL)
    line7 = "\"Instructions for setting up keys for %s subscribed to %s\" \n"%(user, list)
    msg = signMsg(message, list, LM_SIG_ID, LM_SIG_PASS)
    # signMsg returns gpg stderr on a bad passphrase; re-prompt until it signs.
    while msg.find("passphrase") <> -1:
        print 'Try again!'
        pass1 = getpass.getpass("Enter passphrase for LM signing key: ")
        LMConfig["LM_SIG_PASS"]=pass1
        LM_SIG_PASS = pass1
        msg = signMsg(message, list, LM_SIG_ID, LM_SIG_PASS )
    # Symmetric encryption under the per-subscriber passphrase.
    msg = encPassMsg(msg, list, userpass )
    header = "From: %s\n"%(LM_EMAIL)
    header += "To: %s\n"%(user)
    header += "Subject: Step3: Accept %s %s\n"%(list, user)
    msg = header + line4 + line5 + line6 + line7 + '\n' +msg + '\n'
    if debugflag:
        print msg
    # send to user
    sendMail(msg, list, user)
def sendToLKPubToLS( list, LKkeyid, LKprimkeyid, config, debugflag ):
    """Send a signed LKpubkey command (announcing the list-key IDs) to the LS.

    :param LKkeyid/LKprimkeyid: key IDs to announce
    :param config: pre-loaded config dict, or None to read list.conf
    Ctrl-C aborts cleanly via the KeyboardInterrupt handler.
    """
    try:
        global LM_ID, LM_SIG_ID, LM_PASS, LM_EMAIL, LS_ID, LS_EMAIL, SMTPDomain, MySMTPServer, MySMTPPort
        listSELSPath = LM_PATH + '/lists/' + list
        gnupg.options.homedir = listSELSPath
        LMConfig = {}
        if config == None:
            listConfigFile = listSELSPath + '/list.conf'
            readLMConfig( listConfigFile, LMConfig )
        else:
            LMConfig = config
        LM_ID = LMConfig['LM_ID']
        LM_SIG_ID = LMConfig['LM_SIG_ID']
        LM_PASS = LMConfig['LM_PASS']
        LM_SIG_PASS = LMConfig['LM_SIG_PASS']
        LM_EMAIL = LMConfig['LM_EMAIL']
        LS_ID = LMConfig['LS_ID']
        LS_EMAIL = LMConfig['LS_EMAIL']
        SMTPDomain = LMConfig['SMTPDomain']
        MySMTPServer = LMConfig['MySMTPServer']
        MySMTPPort = LMConfig['MySMTPPort']
        # build a message
        message = buildLKPubKeyMsg( list, LKkeyid, LKprimkeyid, LM_EMAIL )
        # sign a message
        msg = signMsg(message, list, LM_SIG_ID, LM_SIG_PASS )
        # signMsg returns gpg stderr on a bad passphrase; re-prompt until it signs.
        while msg.find("passphrase") <> -1:
            print 'Try again!'
            pass1 = getpass.getpass("Enter passphrase for LM signing key: ")
            LMConfig["LM_SIG_PASS"]=pass1
            LM_SIG_PASS = pass1
            msg = signMsg(message, list, LM_SIG_ID, LM_SIG_PASS )
        header = "From: %s\n"%(LM_EMAIL)
        header += "To: %s@%s\n"%(list, SMTPDomain )
        header += "Subject: LKpubkey %s\n\n"%(list)
        msg = header + msg + '\n'
        if debugflag:
            print msg
        # send to LS
        sendMail(msg, list, None)
    except KeyboardInterrupt:
        print ''
        print '%s' % sys.exc_type
        print 'shutting down'
        sys.exit()
def join( list, user, pubfile, randfile, config, debugflag ):
    """Send an encrypted Join command for *user* to the list server.

    Reads the subscriber public key from *pubfile* and key-generation
    randomness from *randfile*, encrypts the command to LS_ID and mails it
    to the list address.
    :param config: pre-loaded config dict, or None to read list.conf
    """
    global LM_ID, LM_SIG_ID, LM_PASS, LM_EMAIL, LS_ID, LS_EMAIL, SMTPDomain, MySMTPServer, MySMTPPort
    listSELSPath = LM_PATH + '/lists/' + list
    gnupg.options.homedir = listSELSPath
    LMConfig = {}
    if config == None:
        listConfigFile = listSELSPath + '/list.conf'
        readLMConfig( listConfigFile, LMConfig )
    else:
        LMConfig = config
    LM_ID = LMConfig['LM_ID']
    LM_SIG_ID = LMConfig['LM_SIG_ID']
    LM_PASS = LMConfig['LM_PASS']
    LM_SIG_PASS = LMConfig['LM_SIG_PASS']
    LM_EMAIL = LMConfig['LM_EMAIL']
    LS_ID = LMConfig['LS_ID']
    LS_EMAIL = LMConfig['LS_EMAIL']
    SMTPDomain = LMConfig['SMTPDomain']
    MySMTPServer = LMConfig['MySMTPServer']
    MySMTPPort = LMConfig['MySMTPPort']
    # read public key of user
    try:
        fp = open(os.path.normpath(pubfile), 'r')
        pubkey = fp.read()
        fp.close()
    except IOError, (errno, strerror):
        print "I/O error(%s): %s" % (errno, strerror)
        sys.exit()
    # read random number from a file for key generation
    try:
        fp = open(os.path.normpath(randfile), 'r')
        random = fp.read()
        fp.close()
    except IOError, (errno, strerror):
        print "I/O error(%s): %s" % (errno, strerror)
        sys.exit()
    # build a message
    msg = buildJoinMsg( list, user, pubkey, random , LM_EMAIL)
    # sign a message
    msg = encMsg(msg, list, LS_ID )
    # NOTE(review): this waits for an armored PGP block in the result, but
    # encMsg never retries — if encryption failed this loop never exits.
    while (msg.find('-----BEGIN PGP MESSAGE-----') == -1):
        time.sleep(1)
    header = "From: %s\n"%(LM_EMAIL)
    header += "To: %s@%s\n"%(list, SMTPDomain)
    header += "Subject: Join %s %s\n\n"%(list, user)
    msg = header + msg + '\n'
    if debugflag:
        print msg
    # send to LS
    sendMail(msg, list, None)
def unsubscribe( list, user, config ):
    """Send a signed Unsubscribe command for *user* to the list server.

    :param config: pre-loaded config dict, or None to read list.conf
    """
    global LM_ID, LM_SIG_ID, LM_PASS, LM_EMAIL, LS_ID, LS_EMAIL, SMTPDomain, MySMTPServer, MySMTPPort
    listSELSPath = LM_PATH + '/lists/' + list
    gnupg.options.homedir = listSELSPath
    LMConfig = {}
    if config == None:
        listConfigFile = listSELSPath + '/list.conf'
        readLMConfig( listConfigFile, LMConfig )
    else:
        LMConfig = config
    LM_ID = LMConfig['LM_ID']
    LM_SIG_ID = LMConfig['LM_SIG_ID']
    LM_PASS = LMConfig['LM_PASS']
    LM_SIG_PASS = LMConfig['LM_SIG_PASS']
    LM_EMAIL = LMConfig['LM_EMAIL']
    LS_ID = LMConfig['LS_ID']
    LS_EMAIL = LMConfig['LS_EMAIL']
    SMTPDomain = LMConfig['SMTPDomain']
    MySMTPServer = LMConfig['MySMTPServer']
    MySMTPPort = LMConfig['MySMTPPort']
    # build a message
    message = buildUnsubscribeMsg( list, user )
    msg = signMsg( message, list, LM_SIG_ID, LM_SIG_PASS )
    # signMsg returns gpg stderr on a bad passphrase; re-prompt until it signs.
    while msg.find("passphrase") <> -1:
        print 'Try again!'
        pass1 = getpass.getpass("Enter passphrase for LM signing key: ")
        LM_SIG_PASS = pass1
        msg = signMsg(message, list, LM_SIG_ID, LM_SIG_PASS )
    header = "From: %s\n"%(LM_EMAIL)
    header += "To: %s@%s\n"%(list, SMTPDomain )
    header += "Subject: Unsubscribe %s %s\n\n"%(list, user)
    msg = header + msg + '\n'
    # send to LS
    sendMail(msg, list, None)
def update( list, LMConfig ):
# build a message
message = buildUpdateMsg( list)
msg = signMsg( message, list, LM_SIG_ID, LM_SIG_PASS )
while msg.find("passphrase") <> -1:
print 'Try again!'
pass1 = getpass.getpass("Enter passphrase for LM signing key: ")
LMConfig["LM_SIG_PASS"]=pass1
LM_SIG_PASS = pass1
msg = signMsg(message, list, LM_SIG_ID, LM_SIG_PASS )
header = "From: %s\n"%(LM_EMAIL)
header += "To: %s@%s\n"%(list, SMTPDomain )
header += "Subject: Update %s\n\n"%(list)
msg = header + msg + '\n'
# build a message
sendMail(msg, list, None)
def main(arvg=None):
    """CLI entry point: dispatch join/accept/unsubscribe/create/update.

    Options: -c <cmd> -l <list> -u <user> plus the long file options below.
    Returns 0 on completion.
    """
    global LM_ID, LM_SIG_ID, LM_PASS, LM_EMAIL, LS_ID, LS_EMAIL, SMTPDomain, MySMTPServer, MySMTPPort
    # BUG FIX: added --userpass, --LSadminpubfile and --LKrevcert so that the
    # 'accept' command can be invoked with every argument accept() requires.
    opts, args = getopt.getopt(sys.argv[1:], "c:l:u:f:",\
        ['pubfile=', 'randfile=', 'secfile=', 'paramfile=', 'LMpubfile=', 'LKpubfile=',
         'userpass=', 'LSadminpubfile=', 'LKrevcert='])
    list = ""
    user = ""
    cmd = ""
    pubfile = ""
    randfile = ""
    paramfile = ""
    # BUG FIX: these were previously referenced without initialization, so an
    # invocation missing one of the options crashed with NameError.
    secfile = ""
    LMpubfile = ""
    LKpubfile = ""
    userpass = ""
    LSadminpubfile = ""
    LKrevcert = ""
    for o, v in opts:
        if o == "-c":
            cmd = v
        if o == "-l":
            list = v
        if o == "-u":
            user = v
        if o == "--pubfile" :
            pubfile = v
        if o == "--LMpubfile" :
            LMpubfile = v
        if o == "--LKpubfile" :
            LKpubfile = v
        if o == "--secfile" :
            secfile = v
        if o == "--randfile":
            randfile = v
        if o == "--paramfile":
            paramfile = v
        if o == "--userpass":
            userpass = v
        if o == "--LSadminpubfile":
            LSadminpubfile = v
        if o == "--LKrevcert":
            LKrevcert = v
    listSELSPath = LM_PATH + '/lists/' + list
    gnupg.options.homedir = listSELSPath
    debugflag = 0
    # read list config file
    LMConfig = {}
    listConfigFile = listSELSPath + '/list.conf'
    readLMConfig( listConfigFile, LMConfig )
    LM_ID = LMConfig['LM_ID']
    LM_SIG_ID = LMConfig['LM_SIG_ID']
    LM_PASS = LMConfig['LM_PASS']
    LM_SIG_PASS = LMConfig['LM_SIG_PASS']
    LM_EMAIL = LMConfig['LM_EMAIL']
    LS_ID = LMConfig['LS_ID']
    LS_EMAIL = LMConfig['LS_EMAIL']
    SMTPDomain = LMConfig['SMTPDomain']
    MySMTPServer = LMConfig['MySMTPServer']
    MySMTPPort = LMConfig['MySMTPPort']
    if cmd == 'join':
        join( list, user, pubfile, randfile, LMConfig, debugflag )
    elif cmd == 'accept':
        # BUG FIX: the call now matches accept()'s 11-parameter signature;
        # userpass, LSadminpubfile and LKrevcert were previously omitted,
        # which raised TypeError before any work was done.
        accept( list, user, userpass, pubfile, secfile, LMpubfile,
                LSadminpubfile, LKpubfile, LKrevcert, LMConfig, debugflag )
    elif cmd == 'unsubscribe':
        unsubscribe( list, user, LMConfig )
    elif cmd == 'create':
        # BUG FIX: createList takes (list, paramfile, config, debugflag); the
        # old call passed a spurious extra None and raised TypeError.
        createList( list, paramfile, LMConfig, debugflag)
    elif cmd == 'update':
        update( list, LMConfig )
    return 0

if __name__ == "__main__":
    sys.exit(main())
| 28.904903 | 230 | 0.641855 | 2,661 | 19,453 | 4.589252 | 0.108982 | 0.025794 | 0.02432 | 0.016951 | 0.676548 | 0.654438 | 0.634622 | 0.624386 | 0.605798 | 0.572142 | 0 | 0.003462 | 0.198273 | 19,453 | 672 | 231 | 28.947917 | 0.779559 | 0.040559 | 0 | 0.651079 | 0 | 0.005396 | 0.201048 | 0.001135 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.106115 | 0.021583 | null | null | 0.071942 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
7318a8ef2cf30e430c1ec8b0ac911ac2a554250e | 344 | py | Python | tests/plugins/tokenize_parser.py | n1kolasM/wemake-python-styleguide | f39e87897de89bea1c49d410beb5b1cbaf930807 | [
"MIT"
] | 1 | 2020-02-21T18:58:44.000Z | 2020-02-21T18:58:44.000Z | tests/plugins/tokenize_parser.py | n1kolasM/wemake-python-styleguide | f39e87897de89bea1c49d410beb5b1cbaf930807 | [
"MIT"
] | 15 | 2020-02-22T11:09:46.000Z | 2020-02-27T16:36:54.000Z | tests/plugins/tokenize_parser.py | n1kolasM/wemake-python-styleguide | f39e87897de89bea1c49d410beb5b1cbaf930807 | [
"MIT"
] | 1 | 2019-12-12T19:18:58.000Z | 2019-12-12T19:18:58.000Z | # -*- coding: utf-8 -*-
import io
import tokenize
from textwrap import dedent
import pytest
@pytest.fixture(scope='session')
def parse_tokens():
    """Session-scoped fixture: a callable that tokenizes a dedented code string."""
    def factory(code: str):
        stream = io.StringIO(dedent(code))
        # generate_tokens treats readline's '' at EOF the same as the
        # original lambda's StopIteration, so the token stream is identical.
        return list(tokenize.generate_tokens(stream.readline))
    return factory
| 20.235294 | 66 | 0.680233 | 44 | 344 | 5.272727 | 0.659091 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003597 | 0.19186 | 344 | 16 | 67 | 21.5 | 0.830935 | 0.148256 | 0 | 0 | 0 | 0 | 0.02439 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0.4 | 0 | 0.8 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
731e80d1e926380197b733d958d43e4495319681 | 15,845 | py | Python | volInteract.py | thepcn3rd/volInteract | 0a053144c42e9748652f1b68c9ee1fe8e3a520d9 | [
"MIT"
] | 1 | 2019-12-10T16:43:23.000Z | 2019-12-10T16:43:23.000Z | volInteract.py | thepcn3rd/volInteract | 0a053144c42e9748652f1b68c9ee1fe8e3a520d9 | [
"MIT"
] | null | null | null | volInteract.py | thepcn3rd/volInteract | 0a053144c42e9748652f1b68c9ee1fe8e3a520d9 | [
"MIT"
] | null | null | null | #!/usr/bin/python
from threading import Thread
import cmd, time, os, sys
import ConfigParser
import subprocess
# Load persisted settings.  config.read() silently ignores a missing
# config.ini; in that case the config.get() calls below will raise
# ConfigParser.NoSectionError.
config = ConfigParser.ConfigParser()
config.read("config.ini")
VOLATILITY_LOCATION = config.get('volatility', 'volatility_location')  # path to the memory image
VOLATILITY_PROFILE = config.get('volatility', 'volatility_profile')    # Volatility profile name
SAVE_LOCATION = config.get('volatility', 'project_save_location')      # project output directory
# TODO: Create the output directory if it does not exist
# TODO: Verify the Volatility Path - Add to config.ini
# TODO: Add volatility path to the options that can be setup
# TODO: Break-out the directories into stages of the checklist
# TODO: Integrate foremost into the tool...
# TODO: Integrate MFT analysis into the tool...
vPATH = "/usr/bin/vol.py" # Volatility PATH (hard-coded; not read from config.ini)
def pluginExec(c):
global VOLATILITY_LOCATION, VOLATILITY_PROFILE, config, SAVE_LOCATION, vPATH
commandStr = ""
if c == "malfind":
if not os.path.exists("output/malfind-dump"):
os.makedirs("output/malfind-dump")
commandStr = vPATH + " -f " + VOLATILITY_LOCATION + " " + c + " --output-file=output/" + c + ".txt --dump-dir=output/malfind-dump"
# Save to a text file the processes that are unique in the malfind.txt output
commandStr += ";cat output/malfind.txt | grep 'Process:' | awk '{print $1 \" \" $2 \" \" $3 \" \" $4}' | sort | uniq -c | sort -n > output/malfind-unique-processes.txt"
# Save a clamscan of the malfind-dump directory to the output folder
commandStr += ";clamscan output/malfind-dump/ --log=output/malfind-clamscan-results.txt --quiet"
print "Executing: " + commandStr
else:
print "Appears the directory of malfind-dump already exists in the output. Remove before running again..."
else:
commandStr = vPATH + " -f " + VOLATILITY_LOCATION + " --profile=" + VOLATILITY_PROFILE + " " + c + " --output-file=output/" + c + ".txt"
try:
print "Running in the background... Run 'show output' to see if the file was created."
print "Execute 'cat <filename>' to read the contents of the output file."
print "Executing: " + commandStr
subprocess.Popen([commandStr], shell=True)
except:
print "Error executing command: " + commandStr
print
print
return
class volInteractive(cmd.Cmd):
def __init__(self):
global VOLATILITY_LOCATION, VOLATILITY_PROFILE
print
print "Volatility Workspace"
print "--------------------"
print "Volatility Image Location: " + VOLATILITY_LOCATION
print "Volatility Profile Selected: " + VOLATILITY_PROFILE
print "Project Directory: " + SAVE_LOCATION
selection = raw_input("Do you need to update the above options? ")
if selection == 'Y' or selection == 'y':
print "set location - Set the VOLATILITY_LOCATION in the config.ini"
print "set profile - Set the VOLATILITY_PROFILE in the config.ini"
print "set save - Set where teh Project files are saved"
print
print
print "Output Gathered"
print "---------------"
os.system("ls -l output")
print
if not os.path.exists("output"):
os.makedirs("output")
cmd.Cmd.__init__(self)
time.sleep(2)
self.prompt = "#> "
return
def do_show(self, command):
"""Show the settings that are configured"""
global VOLATILITY_LOCATION, VOLATILITY_PROFILE, SAVE_LOCATION
if command == "all":
print "Volatility Image Location: " + VOLATILITY_LOCATION
print "Volatility Profile Selected: " + VOLATILITY_PROFILE
print "Project Directory: " + SAVE_LOCATION
print
elif command == "location":
print "Volatility Image Location: " + VOLATILITY_LOCATION
print
elif command == "output":
os.system("ls -l output/")
print
elif command == "profile":
print "Volatility Profile Selected: " + VOLATILITY_PROFILE
print
elif command == "save":
print "Project Directory: " + SAVE_LOCATION
else:
print "show all - Show all of the settings configured"
print "show location - Show the VOLATILITY_LOCATION selected"
print "show output - Show the contents of the output directory"
print "show profile - Show the VOLATILITY_PROFILE selected"
print "show save - Show where the Project files are saved"
print
return
def do_output(self, command):
"""Show the contents of the output directory"""
os.system("ls -l output/")
print
return
def do_use(self, command):
"""Use the specified plugin"""
if command == "psscan" or command == "pslist" or command == "pstree" or command == "psxview":
pluginExec(command)
elif command == "autoruns":
pluginExec(command)
elif command == "consoles" or command == "cmdscan" or command == "connections" or command == "connscan":
pluginExec(command)
elif command == "imageinfo":
pluginExec(command)
elif command == "malfind":
pluginExec(command)
elif command == "clamscan":
commandStr = "clamscan output/ --log=output/clamscan.txt --quiet"
print "Executing: " + commandStr
subprocess.Popen([commandStr], shell=True)
elif command == "sockets" or command == "sockscan" or command == "svcscan":
pluginExec(command)
else:
print "use autoruns - Searches the registry and memory space for applications running at system startup and maps them to running processes"
print "use clamscan - Executes a recursive scan on the output folder"
print "use cmdscan - Extract command history by scanning for _COMMAND_HISTORY"
print "use connections - Print list of open connections [Windows XP and 2003 Only]"
print "use connscan - Pool scanner for tcp connections"
print "use consoles - Extract command history by scanning for _CONSOLE_INFORMATION"
print "use imageinfo - Executes the imageinfo plugin"
print "use malfind - Find hidden and injected code"
print "use pslist - Print all running processes by following the EPROCESS lists"
print "use psscan - Executes the psscan plugin in the background"
print "use psxview - Find hidden processes with various process listings"
print "use pstree - Print process list as a tree"
print "use sockets - Print list of open sockets"
print "use sockscan - Pool scanner for tcp socket objects"
print "use svcscan - Scan for Windows services"
#print "use pstotal - Combination of pslist,psscan & pstree
print
return
def do_pwd(self, command):
"""Displays the present working directory"""
os.system("pwd")
print
return
def do_ls(self, command):
"""Displays a directory listing of the pwd or specified directory of output or directories in output"""
items = command.split(" ")
if items[0] <> '':
if items[0] == "output":
os.system("ls -l output/")
elif items[0] == "malfind-dump":
os.system("ls -l output/malfind-dump/")
else:
"""Displays the content of the current directory."""
os.system("ls -l")
print
return
def do_cat(self, command):
"""cat a particular file in the output directory"""
command = command.replace(" ","")
command = command.replace(";","")
systemCmd = "cat output/" + command
os.system(systemCmd)
print
return
def do_search(self, command):
"""Search for a keyword in the files in the output directory. Does not include the sub-directories"""
command = command.replace(" ","")
command = command.replace(";","")
command = command.strip()
if not command == "":
systemCmd = "grep -i " + command + " output/*.txt"
os.system(systemCmd)
print
return
def do_clamscan(self, command):
"""Run a recursive clamscan on files in the output directory"""
commandStr = "clamscan output/ --log=output/clamscan.txt --quiet"
print "Executing: " + commandStr
subprocess.Popen([commandStr], shell=True)
return
def do_note(self, command):
"""Creates a note and appends it to output/notes.txt"""
txtNote = raw_input("Note: ")
f = open('output/notes.txt', 'a')
txtNote = txtNote + '\n'
f.write(txtNote)
f.close()
print
return
def do_set(self, command):
"""Set and save Global Variables to config.ini"""
global VOLATILITY_LOCATION, VOLATILITY_PROFILE, config, SAVE_LOCATION
if command == "location":
newLocation = raw_input("New Image Location: ")
newLocation = newLocation.strip()
config.set('volatility','volatility_location', newLocation)
with open('config.ini', 'wb') as configfile:
config.write(configfile)
VOLATILITY_LOCATION = config.get('volatility', 'volatility_location')
print
elif command == "profile":
newProfile = raw_input("New Image Profile: ")
newProfile = newProfile.strip()
config.set('volatility','volatility_profile', newProfile)
with open('config.ini', 'wb') as configfile:
config.write(configfile)
VOLATILITY_PROFILE = config.get('volatility', 'volatility_profile')
print
elif command == "save":
newSave = raw_input("New Project Directory (ie. /home/user2/project1): ")
newSave = newSave.strip()
config.set('volatility','project_save_location', newSave)
with open('config.ini', 'wb') as configfile:
config.write(configfile)
SAVE_LOCATION = config.get('volatility', 'project_save_location')
print
else:
print "set location - Set the VOLATILITY_LOCATION in the config.ini"
print "set profile - Set the VOLATILITY_PROFILE in the config.ini"
print "set save - Set where teh Project files are saved"
print
return
def do_checklist(self, command):
"""Outputs the Best Practice Checklist of Malware Analysis based on Information Collected"""
### Stage 1
print "Stage 1: Identify Rogue Processes"
if os.path.exists("output/pslist.txt"):
print "[X] pslist - Print all running processes within the EPROCESS doubly linked list"
else:
print "[ ] pslist - Print all running processes within the EPROCESS doubly linked list"
if os.path.exists("output/psscan.txt"):
print "[X] psscan - Scan physical memory for EPROCESS pool allocations"
else:
print "[ ] psscan - Scan physical memory for EPROCESS pool allocations"
if os.path.exists("output/pstree.txt"):
print "[X] pstree - Print Process list as a tree showing parent relationships using EPROCESS linked list"
else:
print "[ ] pstree - Print Process list as a tree showing parent relationships using EPROCESS linked list"
if os.path.exists("output/pstotal.txt"):
print "[X] pstotal - Comparison of psscan and pslist results. Also produces output in graphics format"
else:
print "[ ] pstotal - Comparison of psscan and pslist results. Also produces output in graphics format"
if os.path.exists("output/malsysproc.txt"):
print "[X] malsysproc - Identify suspicious system processes"
else:
print "[ ] malsysproc - Identify suspicious system processes"
if os.path.exists("output/processbl.txt"):
print "[X] processbl - Compares processes and loaded DLLs with a Baseline Image"
else:
print "[ ] processbl - Compares processes and loaded DLLs with a Baseline Image"
### Stage 2
print
print "Stage 2: Analyze Process Objects"
if os.path.exists("output/dlllist.txt"):
print "[X] dlllist - Print list of loaded dlls for each process"
else:
print "[ ] dlllist - Print list of loaded dlls for each process"
if os.path.exists("output/cmdline.txt"):
print "[X] cmdline - Display command-line args for each process"
else:
print "[ ] cmdline - Display command-line args for each process"
if os.path.exists("output/cmdscan.txt"):
print "[X] cmdscan - Extract command history by scanning for _COMMAND_HISTORY"
else:
print "[ ] cmdscan - Extract command history by scanning for _COMMAND_HISTORY"
if os.path.exists("output/getsids.txt"):
print "[X] getsids - Print process security identifiers"
else:
print "[ ] getsids - Print process security identifiers"
if os.path.exists("output/handles.txt"):
print "[X] handles - List of open handles for each process"
else:
print "[ ] handles - List of open handles for each process"
if os.path.exists("output/filescan.txt"):
print "[X] filescan - Scan memory for FILE_OBJECT handles"
else:
print "[ ] filescan - Scan memory for FILE_OBJECT handles"
if os.path.exists("output/svcscan.txt"):
print "[X] svcscan - Scan for Windows Service Information"
else:
print "[ ] svcscan - Scan for Windows Service Information"
if os.path.exists("output/autoruns.txt"):
print "[X] autoruns - Searches the registry and memory locations running at system startup and maps them to running processes"
else:
print "[ ] autoruns - Searches the registry and memory locations running at system startup and maps them to running processes"
### Stage 3
print
print "Stage 3: Review Network Artifacts"
if os.path.exists("output/connections.txt"):
print "[X] connections - List of Open TCP Connections [XP]"
else:
print "[ ] connections - List of Open TCP Connections [XP]"
if os.path.exists("output/connscan.txt"):
print "[X] connscan - TCP Connections including closed [XP]"
else:
print "[ ] connscan - TCP Connections including closed [XP]"
if os.path.exists("output/sockets.txt"):
print "[X] sockets - Print Listening Sockets (any protocol)"
else:
print "[ ] sockets - Print Listening Sockets (any protocol)"
if os.path.exists("output/sockscan.txt"):
print "[X] sockscan - ID sockets, including closed and unlinked"
else:
print "[ ] sockscan - ID sockets, including closed and unlinked"
if os.path.exists("output/netscan.txt"):
print "[X] netscan - Scan for connections and sockets"
else:
print "[ ] netscan - Scan for connections and sockets"
### Stage 4
print
print "Stage 4: Look for Evidence of Code Injection"
if os.path.exists("output/malfind.txt"):
print "[X] malfind - Find injected code and dump sections"
else:
print "[ ] malfind - Find injected code and dump sections"
if os.path.exists("output/ldrmodules.txt"):
print "[X] ldrmodules - Detect unlinked DLLs"
else:
print "[ ] ldrmodules - Detect unlinked DLLs"
### Stage 5
print
print "Stage 5: Check for Signs of a Rootkit"
if os.path.exists("output/psxview.txt"):
print "[X] psxview - Find hidden processes using cross-view"
else:
print "[ ] psxview - Find hidden processes using cross-view"
if os.path.exists("output/modscan.txt"):
print "[X] modscan - Scan memory for loaded, unloaded and unlinked drivers"
else:
print "[ ] modscan - Scan memory for loaded, unloaded and unlinked drivers"
if os.path.exists("output/apihooks.txt"):
print "[X] apihooks - Find API/DLL function hooks"
else:
print "[ ] apihooks - Find API/DLL function hooks"
if os.path.exists("output/ssdt.txt"):
print "[X] ssdt - Hooks in System Service Descriptor Table"
else:
print "[ ] ssdt - Hooks in System Service Descriptor Table"
if os.path.exists("output/driverirp.txt"):
print "[X] driverirp - Identify I/O Request Packet Hooks"
else:
print "[ ] driverirp - Identify I/O Request Packet Hooks"
if os.path.exists("output/idt.txt"):
print "[X] idt - Display Interrupt Descriptor Table"
else:
print "[ ] idt - Display Interrupt Descriptor Table"
### Stage 6
print
print "Stage 6: Dump Suspicious Processes and Drivers"
if os.path.exists("output/dlldump.txt"):
print "[X] dlldump - Extract DLLs from Specific Processes"
else:
print "[ ] dlldump - Extract DLLs from Specific Processes"
if os.path.exists("output/moddump.txt"):
print "[X] moddump - Extract Kernel Drivers"
else:
print "[ ] moddump - Extract Kernel Drivers"
if os.path.exists("output/procmemdump.txt"):
print "[X] procmemdump - Dump process to executable sample"
else:
print "[ ] procmemdump - Dump process to executable sample"
if os.path.exists("output/memdump.txt"):
print "[X] memdump - Dump every memory section into a file"
else:
print "[ ] memdump - Dump every memory section into a file"
return
def emptyline(self):
pass
return
def do_exit(self, line):
"""Exit the Volatility Workspace"""
return True
if __name__ == '__main__':
    # Start the interactive shell when executed as a script.
    vI = volInteractive()
    # NOTE(review): the thread is joined immediately, so this appears
    # equivalent to calling vI.cmdloop() directly -- confirm before simplifying.
    t1 = Thread(target = vI.cmdloop)
    t1.start()
    t1.join()
| 39.317618 | 171 | 0.704386 | 2,104 | 15,845 | 5.261882 | 0.169202 | 0.028453 | 0.035769 | 0.053654 | 0.545028 | 0.450637 | 0.348749 | 0.254087 | 0.188962 | 0.157077 | 0 | 0.00224 | 0.182834 | 15,845 | 402 | 172 | 39.415423 | 0.852796 | 0.035405 | 0 | 0.382184 | 0 | 0.011494 | 0.535993 | 0.025468 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.002874 | 0.011494 | null | null | 0.41092 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 2 |
73241d906af553548dbe952833fccd5714f8915c | 1,458 | py | Python | src/observer/python/test.py | oxnz/design-patterns | 6fe3b4e7a2f524a5c1ebac7f4d3eed0b0f955c59 | [
"MIT"
] | 117 | 2015-01-31T13:01:02.000Z | 2022-02-02T19:55:19.000Z | src/observer/python/test.py | addisonlynch/design-patterns | e9e0f6029ac427a8c81847e8df4a2720b03ce918 | [
"MIT"
] | 1 | 2018-01-10T13:06:51.000Z | 2018-01-10T13:06:51.000Z | src/observer/python/test.py | addisonlynch/design-patterns | e9e0f6029ac427a8c81847e8df4a2720b03ce918 | [
"MIT"
] | 34 | 2015-08-17T13:54:30.000Z | 2020-03-23T19:35:13.000Z | #!/usr/bin/python
#coding: utf-8
class Subject(object):
    """Observable base class for the Observer pattern.

    Keeps a list of attached observers and notifies them, in attachment
    order, whenever notify() is called.
    """

    def __init__(self):
        # Observers currently registered; notified in attachment order.
        self._observers = []

    def attach(self, observer):
        """Register *observer* unless it is already attached."""
        if observer not in self._observers:
            self._observers.append(observer)

    def detach(self, observer):
        """Unregister *observer*; silently ignore one that was never attached."""
        try:
            self._observers.remove(observer)
        except ValueError:
            pass

    def notify(self, modifier=None):
        """Call update(self) on every observer, skipping *modifier* if given."""
        for listener in self._observers:
            if modifier != listener:
                listener.update(self)
# Example usage
class DataSubject(Subject):
    """Concrete subject holding a single integer datum.

    Assigning to the `data` property stores the value and notifies every
    attached observer.
    """

    def __init__(self, name=""):
        super(DataSubject, self).__init__()
        self.name = name
        self._data = 0

    @property
    def data(self):
        """The current datum."""
        return self._data

    @data.setter
    def data(self, value):
        self._data = value
        self.notify()
class Observer:
    """Abstract observer; subclasses override update() to react to changes."""

    def __init__(self):
        pass

    def update(self, subject):
        """Handle a change in *subject*; the default does nothing."""
        pass
class DataObserver(Observer):
    """Observer that reports a DataSubject's name and datum to stdout."""

    def update(self, subject):
        # Parenthesize the whole expression: the original
        # `print ("...") % (...)` only works where print is a statement
        # (Python 2); this form prints the same text under 2 and 3.
        print("DataSubject: %s has data %d" % (subject.name, subject.data))
def test():
    """Demo: two subjects, two observers; attach, notify, detach."""
    d1 = DataSubject("DataSubject 1")
    d2 = DataSubject("DataSubject 2")
    ob1 = DataObserver()
    ob2 = DataObserver()
    d1.attach(ob1)
    d1.attach(ob2)
    d2.attach(ob1)
    d2.attach(ob2)
    print("setting DataSubject 1 to 10")
    # Bug fix: `print d1.data` was a Python-2-only statement; the
    # parenthesized form prints the same value under both 2 and 3.
    print(d1.data)
    d1.data = 10
    print("setting DataSubject 2 to 14")
    d2.data = 14
    print("data 1 detach ob2")
    d1.detach(ob2)
    print("setting DataSubject 1 to 20")
    d1.data = 20
    print("data 1 detach ob1")
    d1.detach(ob1)
    print("setting DataSubject 1 to 30")
    d1.data = 30
# Run the demo only when executed as a script, not on import.
if __name__ == '__main__':
    test()
| 20.535211 | 70 | 0.692044 | 206 | 1,458 | 4.742718 | 0.286408 | 0.06653 | 0.094166 | 0.073695 | 0.085977 | 0.059365 | 0 | 0 | 0 | 0 | 0 | 0.04063 | 0.17284 | 1,458 | 70 | 71 | 20.828571 | 0.769486 | 0.029492 | 0 | 0.118644 | 0 | 0 | 0.143768 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.050847 | 0 | null | null | 0.135593 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
7329a6553c1d534de7857ec6b3b9822c54f2e0ba | 3,279 | py | Python | python/oneflow/nn/__init__.py | Warmchay/oneflow | 5a333ff065bb89990318de2f1bd650e314d49301 | [
"Apache-2.0"
] | null | null | null | python/oneflow/nn/__init__.py | Warmchay/oneflow | 5a333ff065bb89990318de2f1bd650e314d49301 | [
"Apache-2.0"
] | null | null | null | python/oneflow/nn/__init__.py | Warmchay/oneflow | 5a333ff065bb89990318de2f1bd650e314d49301 | [
"Apache-2.0"
] | null | null | null | """
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from oneflow.nn.graph import Graph
from oneflow.nn.module import Module
from oneflow.nn.modules.activation import (
ELU,
GELU,
Hardsigmoid,
Hardswish,
Hardtanh,
LeakyReLU,
LogSigmoid,
LogSoftmax,
Mish,
PReLU,
ReLU,
ReLU6,
Sigmoid,
Softmax,
Softplus,
Tanh,
SELU,
SiLU,
Softsign,
)
from oneflow.nn.modules.all_reduce import AllReduce
from oneflow.nn.modules.batchnorm import BatchNorm1d, BatchNorm2d, BatchNorm3d
from oneflow.nn.modules.container import (
ModuleDict,
ModuleList,
ParameterDict,
ParameterList,
Sequential,
)
from oneflow.nn.modules.conv import Conv1d, Conv2d, Conv3d, ConvTranspose2d
from oneflow.nn.modules.min_max_observer import MinMaxObserver
from oneflow.nn.modules.moving_average_min_max_observer import (
MovingAverageMinMaxObserver,
)
from oneflow.nn.modules.fake_quantization import FakeQuantization
from oneflow.nn.modules.quantization import Quantization
from oneflow.nn.modules.dataset import (
COCOReader,
CoinFlip,
CropMirrorNormalize,
OFRecordImageDecoder,
OFRecordImageDecoderRandomCrop,
OFRecordImageGpuDecoderRandomCropResize,
OFRecordRawDecoder,
OFRecordRawDecoder as OfrecordRawDecoder,
OFRecordReader,
OFRecordReader as OfrecordReader,
OFRecordBytesDecoder,
GPTIndexedBinDataReader,
)
from oneflow.nn.modules.dropout import Dropout
from oneflow.nn.modules.flatten import Flatten
from oneflow.nn.modules.instancenorm import (
InstanceNorm1d,
InstanceNorm2d,
InstanceNorm3d,
)
from oneflow.nn.modules.linear import Identity, Linear
from oneflow.nn.modules.loss import (
BCELoss,
BCEWithLogitsLoss,
CrossEntropyLoss,
CTCLoss,
KLDivLoss,
L1Loss,
MarginRankingLoss,
MSELoss,
NLLLoss,
SmoothL1Loss,
CombinedMarginLoss,
)
from oneflow.nn.modules.normalization import GroupNorm, LayerNorm
from oneflow.nn.modules.padding import (
ConstantPad1d,
ConstantPad2d,
ConstantPad3d,
ReflectionPad2d,
ReplicationPad2d,
ZeroPad2d,
)
from oneflow.nn.modules.pixelshuffle import PixelShufflev2 as PixelShuffle
from oneflow.nn.modules.pooling import (
AvgPool1d,
AvgPool2d,
AvgPool3d,
MaxPool1d,
MaxPool2d,
MaxPool3d,
AdaptiveAvgPool1d,
AdaptiveAvgPool2d,
AdaptiveAvgPool3d,
)
from oneflow.nn.modules.sparse import Embedding
from oneflow.nn.modules.upsampling import (
Upsample,
UpsamplingBilinear2d,
UpsamplingNearest2d,
)
from oneflow.nn.modules.fold import Fold, Unfold
from oneflow.nn.parameter import Parameter
from oneflow.nn import utils
from . import functional
from . import parallel
| 25.617188 | 78 | 0.761208 | 359 | 3,279 | 6.930362 | 0.512535 | 0.114952 | 0.135852 | 0.176849 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014455 | 0.177188 | 3,279 | 127 | 79 | 25.818898 | 0.907709 | 0.177188 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.264151 | 0 | 0.264151 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
73392111fe0d7bd6483808b661d6a523f3d18ab1 | 13,868 | py | Python | pysnmp-with-texts/CISCOSB-STACK-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/CISCOSB-STACK-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/CISCOSB-STACK-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module CISCOSB-STACK-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCOSB-STACK-MIB
# Produced by pysmi-0.3.4 at Wed May 1 12:23:37 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ValueSizeConstraint, ConstraintsUnion, SingleValueConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsUnion", "SingleValueConstraint", "ConstraintsIntersection")
MacAddress, = mibBuilder.importSymbols("BRIDGE-MIB", "MacAddress")
switch001, = mibBuilder.importSymbols("CISCOSB-MIB", "switch001")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Counter32, Bits, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, Gauge32, Counter64, MibIdentifier, TimeTicks, ModuleIdentity, ObjectIdentity, Unsigned32, NotificationType, iso, Integer32 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "Bits", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "Gauge32", "Counter64", "MibIdentifier", "TimeTicks", "ModuleIdentity", "ObjectIdentity", "Unsigned32", "NotificationType", "iso", "Integer32")
TruthValue, DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "TruthValue", "DisplayString", "TextualConvention")
rlStack = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107))
rlStack.setRevisions(('2005-04-14 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: rlStack.setRevisionsDescriptions(('Initial revision.',))
if mibBuilder.loadTexts: rlStack.setLastUpdated('200504140000Z')
if mibBuilder.loadTexts: rlStack.setOrganization('Cisco Small Business')
if mibBuilder.loadTexts: rlStack.setContactInfo('Postal: 170 West Tasman Drive San Jose , CA 95134-1706 USA Website: Cisco Small Business Home http://www.cisco.com/smb>;, Cisco Small Business Support Community <http://www.cisco.com/go/smallbizsupport>')
if mibBuilder.loadTexts: rlStack.setDescription('The private MIB module definition for stack.')
class StackMode(Integer32):
    """Auto-generated (pysmi) textual convention: stacking mode of a unit,
    one of standalone(1), native(2), basic-hybrid(3), advanced-hybrid(4),
    advanced-hybrid-XG(5)."""
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))
    namedValues = NamedValues(("standalone", 1), ("native", 2), ("basic-hybrid", 3), ("advanced-hybrid", 4), ("advanced-hybrid-XG", 5))
class PortsPair(Integer32):
    """Auto-generated (pysmi) textual convention: which pair of stack ports
    is in use, one of pair-s1s2(1), pair-s3s4(2), pair-s1s25G(3),
    pair-s1s2Xg(4), pair-lionXg(5)."""
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))
    namedValues = NamedValues(("pair-s1s2", 1), ("pair-s3s4", 2), ("pair-s1s25G", 3), ("pair-s1s2Xg", 4), ("pair-lionXg", 5))
class HybridStackPortSpeed(Integer32):
    """Auto-generated (pysmi) textual convention: speed of a hybrid-stack
    port, one of port-speed-1G(1), port-speed-5G(2), port-speed-10G(3),
    port-speed-auto(4), port-speed-down(5)."""
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))
    namedValues = NamedValues(("port-speed-1G", 1), ("port-speed-5G", 2), ("port-speed-10G", 3), ("port-speed-auto", 4), ("port-speed-down", 5))
class HybridStackDeviceMode(Integer32):
    """Auto-generated (pysmi) textual convention: device operating mode,
    mode-L2(1) or mode-L3(2)."""
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
    namedValues = NamedValues(("mode-L2", 1), ("mode-L3", 2))
rlStackActiveUnitIdTable = MibTable((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 1), )
if mibBuilder.loadTexts: rlStackActiveUnitIdTable.setStatus('current')
if mibBuilder.loadTexts: rlStackActiveUnitIdTable.setDescription(' The table listing the active unit id of the requested unit.')
rlStackActiveUnitIdEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 1, 1), ).setIndexNames((0, "CISCOSB-STACK-MIB", "rlStackCurrentUnitId"))
if mibBuilder.loadTexts: rlStackActiveUnitIdEntry.setStatus('current')
if mibBuilder.loadTexts: rlStackActiveUnitIdEntry.setDescription(' An entry in the rlStackActiveUnitIdTable.')
rlStackCurrentUnitId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 1, 1, 1), Integer32())
if mibBuilder.loadTexts: rlStackCurrentUnitId.setStatus('current')
if mibBuilder.loadTexts: rlStackCurrentUnitId.setDescription('The unit number device, which is the active unit id')
rlStackActiveUnitIdAfterReset = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlStackActiveUnitIdAfterReset.setStatus('current')
if mibBuilder.loadTexts: rlStackActiveUnitIdAfterReset.setDescription('Indicates the unit id that will be after reset.')
rlStackUnitModeAfterReset = MibScalar((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("standalone", 1), ("stack", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlStackUnitModeAfterReset.setStatus('current')
if mibBuilder.loadTexts: rlStackUnitModeAfterReset.setDescription('set unit type that will be after reset, standalone or stack.')
rlStackUnitMode = MibScalar((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("standalone", 1), ("stack", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlStackUnitMode.setStatus('current')
if mibBuilder.loadTexts: rlStackUnitMode.setDescription('show unit type standalone or stack.')
rlStackUnitMacAddressAfterReset = MibScalar((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 4), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlStackUnitMacAddressAfterReset.setReference('IEEE 802.1D-1990: Sections 6.4.1.1.3 and 3.12.5')
if mibBuilder.loadTexts: rlStackUnitMacAddressAfterReset.setStatus('current')
if mibBuilder.loadTexts: rlStackUnitMacAddressAfterReset.setDescription('The MAC address used by this bridge after rest.')
rlStackHybridTable = MibTable((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 5), )
if mibBuilder.loadTexts: rlStackHybridTable.setStatus('current')
if mibBuilder.loadTexts: rlStackHybridTable.setDescription(' The table listing information required for hybrid stack.')
rlStackHybridEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 5, 1), ).setIndexNames((0, "CISCOSB-STACK-MIB", "rlStackHybridUnitId"))
if mibBuilder.loadTexts: rlStackHybridEntry.setStatus('current')
if mibBuilder.loadTexts: rlStackHybridEntry.setDescription(' An entry in the rlStackActiveUnitIdTable.')
rlStackHybridUnitId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 5, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8)))
if mibBuilder.loadTexts: rlStackHybridUnitId.setStatus('current')
if mibBuilder.loadTexts: rlStackHybridUnitId.setDescription('The unit number device, which is the active unit id')
rlStackHybridStackMode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 5, 1, 2), StackMode()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlStackHybridStackMode.setStatus('current')
if mibBuilder.loadTexts: rlStackHybridStackMode.setDescription('Indicates the unit stack mode.')
rlStackHybridPortsPair = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 5, 1, 3), PortsPair()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlStackHybridPortsPair.setStatus('current')
if mibBuilder.loadTexts: rlStackHybridPortsPair.setDescription('Indicates the PortsPair.')
rlStackHybridPortNo1speed = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 5, 1, 4), HybridStackPortSpeed()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlStackHybridPortNo1speed.setStatus('current')
if mibBuilder.loadTexts: rlStackHybridPortNo1speed.setDescription('Indicates the rlStackHybridPortNo1speed.')
rlStackHybridPortNo2speed = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 5, 1, 5), HybridStackPortSpeed()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlStackHybridPortNo2speed.setStatus('current')
if mibBuilder.loadTexts: rlStackHybridPortNo2speed.setDescription('Indicates the rlStackHybridPortNo2speed.')
rlStackHybridUnitIdAfterReset = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 5, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 8))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlStackHybridUnitIdAfterReset.setStatus('current')
if mibBuilder.loadTexts: rlStackHybridUnitIdAfterReset.setDescription('Indicates the unit id that will be after reset.')
rlStackHybridStackModeAfterReset = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 5, 1, 7), StackMode()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlStackHybridStackModeAfterReset.setStatus('current')
if mibBuilder.loadTexts: rlStackHybridStackModeAfterReset.setDescription('Indicates the unit stack mode that will be after reset.')
rlStackHybridPortsPairAfterReset = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 5, 1, 8), PortsPair()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlStackHybridPortsPairAfterReset.setStatus('current')
if mibBuilder.loadTexts: rlStackHybridPortsPairAfterReset.setDescription('Indicates the PortsPair that will be after reset.')
rlStackHybridPortNo1speedAfterReset = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 5, 1, 9), HybridStackPortSpeed()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlStackHybridPortNo1speedAfterReset.setStatus('current')
if mibBuilder.loadTexts: rlStackHybridPortNo1speedAfterReset.setDescription('Indicates the HybridStackPortSpeed that will be after reset.')
rlStackHybridPortNo2speedAfterReset = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 5, 1, 10), HybridStackPortSpeed()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlStackHybridPortNo2speedAfterReset.setStatus('current')
if mibBuilder.loadTexts: rlStackHybridPortNo2speedAfterReset.setDescription('Indicates the HybridStackPortSpeed that will be after reset.')
rlStackHybridDeleteStartupAfterReset = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 5, 1, 11), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlStackHybridDeleteStartupAfterReset.setStatus('current')
if mibBuilder.loadTexts: rlStackHybridDeleteStartupAfterReset.setDescription('Indicates whether the startup configuration is deleted after reset.')
rlStackHybridDeviceModeAfterReset = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 5, 1, 12), HybridStackDeviceMode()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlStackHybridDeviceModeAfterReset.setStatus('current')
if mibBuilder.loadTexts: rlStackHybridDeviceModeAfterReset.setDescription('Indicates Device mode (Layer2 or Layer3) after reset.')
rlStackHybridXgPortNo1Num = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 5, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlStackHybridXgPortNo1Num.setStatus('current')
if mibBuilder.loadTexts: rlStackHybridXgPortNo1Num.setDescription('Indicates the 1st stack cascade active port number.')
rlStackHybridXgPortNo1NumAfterReset = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 5, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlStackHybridXgPortNo1NumAfterReset.setStatus('current')
if mibBuilder.loadTexts: rlStackHybridXgPortNo1NumAfterReset.setDescription('Indicates the 1st stack cascade port number that will be after reset.')
rlStackHybridXgPortNo2Num = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 5, 1, 15), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlStackHybridXgPortNo2Num.setStatus('current')
if mibBuilder.loadTexts: rlStackHybridXgPortNo2Num.setDescription('Indicates the 2nd stack cascade active port number.')
rlStackHybridXgPortNo2NumAfterReset = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 6, 1, 101, 107, 5, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlStackHybridXgPortNo2NumAfterReset.setStatus('current')
if mibBuilder.loadTexts: rlStackHybridXgPortNo2NumAfterReset.setDescription('Indicates the 2nd stack cascade port number that will be after reset.')
mibBuilder.exportSymbols("CISCOSB-STACK-MIB", rlStackHybridPortNo1speed=rlStackHybridPortNo1speed, rlStackUnitMode=rlStackUnitMode, rlStackHybridStackModeAfterReset=rlStackHybridStackModeAfterReset, rlStackActiveUnitIdTable=rlStackActiveUnitIdTable, rlStackHybridPortNo1speedAfterReset=rlStackHybridPortNo1speedAfterReset, rlStackHybridDeviceModeAfterReset=rlStackHybridDeviceModeAfterReset, rlStack=rlStack, PortsPair=PortsPair, rlStackHybridPortNo2speed=rlStackHybridPortNo2speed, rlStackHybridXgPortNo1NumAfterReset=rlStackHybridXgPortNo1NumAfterReset, rlStackActiveUnitIdAfterReset=rlStackActiveUnitIdAfterReset, rlStackHybridTable=rlStackHybridTable, rlStackHybridUnitId=rlStackHybridUnitId, rlStackHybridPortsPairAfterReset=rlStackHybridPortsPairAfterReset, rlStackHybridEntry=rlStackHybridEntry, rlStackHybridXgPortNo2NumAfterReset=rlStackHybridXgPortNo2NumAfterReset, rlStackHybridStackMode=rlStackHybridStackMode, rlStackHybridPortNo2speedAfterReset=rlStackHybridPortNo2speedAfterReset, HybridStackDeviceMode=HybridStackDeviceMode, rlStackUnitModeAfterReset=rlStackUnitModeAfterReset, PYSNMP_MODULE_ID=rlStack, HybridStackPortSpeed=HybridStackPortSpeed, rlStackHybridDeleteStartupAfterReset=rlStackHybridDeleteStartupAfterReset, rlStackHybridXgPortNo2Num=rlStackHybridXgPortNo2Num, rlStackCurrentUnitId=rlStackCurrentUnitId, rlStackActiveUnitIdEntry=rlStackActiveUnitIdEntry, rlStackHybridPortsPair=rlStackHybridPortsPair, rlStackHybridXgPortNo1Num=rlStackHybridXgPortNo1Num, rlStackHybridUnitIdAfterReset=rlStackHybridUnitIdAfterReset, rlStackUnitMacAddressAfterReset=rlStackUnitMacAddressAfterReset, StackMode=StackMode)
| 117.525424 | 1,631 | 0.796077 | 1,455 | 13,868 | 7.586254 | 0.173883 | 0.060881 | 0.106541 | 0.009422 | 0.446729 | 0.284291 | 0.22794 | 0.22794 | 0.220783 | 0.201214 | 0 | 0.059308 | 0.080834 | 13,868 | 117 | 1,632 | 118.529915 | 0.806621 | 0.023652 | 0 | 0.028571 | 0 | 0.019048 | 0.204493 | 0.01079 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.07619 | 0 | 0.190476 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
734fe43649f911bfb91a1a8fabfd5dd9bf719a1b | 90 | py | Python | problemas-resolvidos/neps-academy/programacao-basica-competicoes-c++/python/Exercicio-14-Divisores.py | Ramo-UERJ/competitive-programming | 7f2b821862853a7ebc1de5454914bcc9ea626083 | [
"MIT"
] | null | null | null | problemas-resolvidos/neps-academy/programacao-basica-competicoes-c++/python/Exercicio-14-Divisores.py | Ramo-UERJ/competitive-programming | 7f2b821862853a7ebc1de5454914bcc9ea626083 | [
"MIT"
] | 1 | 2020-07-29T13:23:25.000Z | 2020-07-29T13:23:25.000Z | problemas-resolvidos/neps-academy/programacao-basica-competicoes-c++/python/Exercicio-14-Divisores.py | ieee-uerj/competitive-programming | 7f2b821862853a7ebc1de5454914bcc9ea626083 | [
"MIT"
] | null | null | null | n = int(input())
for i in range(1,n+1):
if ((n % i) == 0):
print(i,end=" ")
| 12.857143 | 24 | 0.411111 | 17 | 90 | 2.176471 | 0.705882 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.04918 | 0.322222 | 90 | 6 | 25 | 15 | 0.557377 | 0 | 0 | 0 | 0 | 0 | 0.011236 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.25 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
73613d0fa662b017d4bccaafd1c287cc36962c82 | 1,436 | py | Python | apim-migration-testing-tool/Python/venv/lib/python3.6/site-packages/zope/interface/tests/test_sorting.py | tharindu1st/apim-migration-resources | dd68aa8c53cf310392bb72e699dd24c57b109cfb | [
"Apache-2.0"
] | 9,953 | 2019-04-03T23:41:04.000Z | 2022-03-31T11:54:44.000Z | apim-migration-testing-tool/Python/venv/lib/python3.6/site-packages/zope/interface/tests/test_sorting.py | tharindu1st/apim-migration-resources | dd68aa8c53cf310392bb72e699dd24c57b109cfb | [
"Apache-2.0"
] | 1,623 | 2015-01-01T08:06:24.000Z | 2022-03-30T19:48:52.000Z | apim-migration-testing-tool/Python/venv/lib/python3.6/site-packages/zope/interface/tests/test_sorting.py | tharindu1st/apim-migration-resources | dd68aa8c53cf310392bb72e699dd24c57b109cfb | [
"Apache-2.0"
] | 2,803 | 2019-04-06T13:15:33.000Z | 2022-03-31T07:42:01.000Z | ##############################################################################
#
# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Test interface sorting
"""
import unittest
from zope.interface import Interface
# Fixture interfaces forming a small hierarchy. The tests below expect the
# sort order to be purely by (module name, interface name) — alphabetical
# here — and not influenced by the inheritance relationships.
class I1(Interface): pass
class I2(I1): pass
class I3(I1): pass
class I4(Interface): pass
class I5(I4): pass
class I6(I2): pass
class Test(unittest.TestCase):
    """Verify the total ordering defined on Interface objects."""

    def test(self):
        """Interfaces from one module sort alphabetically by name."""
        ifaces = sorted([I1, I3, I5, I6, I4, I2])
        self.assertEqual(ifaces, [I1, I2, I3, I4, I5, I6])

    def test_w_None(self):
        """None sorts after every interface."""
        ifaces = sorted([I1, None, I3, I5, I6, I4, I2])
        self.assertEqual(ifaces, [I1, I2, I3, I4, I5, I6, None])

    def test_w_equal_names(self):
        """Interfaces with equal names fall back to sorting by module name."""
        from zope.interface.tests.m1 import I1 as m1_I1
        ifaces = sorted([I1, m1_I1])
        self.assertEqual(ifaces, [m1_I1, I1])
| 29.916667 | 78 | 0.590529 | 195 | 1,436 | 4.307692 | 0.430769 | 0.064286 | 0.032143 | 0.071429 | 0.127381 | 0.102381 | 0.102381 | 0.102381 | 0.102381 | 0.102381 | 0 | 0.047452 | 0.207521 | 1,436 | 47 | 79 | 30.553191 | 0.690685 | 0.391365 | 0 | 0.136364 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.136364 | 1 | 0.136364 | false | 0.272727 | 0.136364 | 0 | 0.590909 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 2 |
736231b9303786a1204dc803e3bbfdc054ca64e5 | 2,012 | py | Python | conans/test/unittests/client/rest/uploader_test.py | Ignition/conan | 84a38590987ecb9f3011f73babc95598ea62535f | [
"MIT"
] | null | null | null | conans/test/unittests/client/rest/uploader_test.py | Ignition/conan | 84a38590987ecb9f3011f73babc95598ea62535f | [
"MIT"
] | 1 | 2019-09-13T10:47:42.000Z | 2019-09-13T10:58:38.000Z | conans/test/unittests/client/rest/uploader_test.py | Ignition/conan | 84a38590987ecb9f3011f73babc95598ea62535f | [
"MIT"
] | null | null | null | import tempfile
import unittest
from collections import namedtuple
import six
from conans.client.rest.uploader_downloader import Uploader
from conans.errors import AuthenticationException, ForbiddenException
from conans.test.utils.tools import TestBufferConanOutput
from conans.util.files import save
class UploaderUnitTest(unittest.TestCase):
    """Check that Uploader maps HTTP error responses to Conan exceptions."""

    def test_401_raises_unauthoirzed_exception(self):
        """A 401 response raises AuthenticationException with the body text."""
        class MockRequester(object):
            # Stub requester whose put() always answers 401 with body "tururu".
            def put(self, *args, **kwargs):
                return namedtuple("response", "status_code content")(401, "tururu")

        out = TestBufferConanOutput()
        uploader = Uploader(MockRequester(), out, verify=False)
        f = tempfile.mktemp()
        save(f, "some contents")
        # The exception message is expected to carry the response body.
        with six.assertRaisesRegex(self, AuthenticationException, "tururu"):
            uploader.upload("fake_url", f)

    def test_403_raises_unauthoirzed_exception_if_no_token(self):
        """A 403 with no auth token is treated as an authentication failure."""
        class MockRequester(object):
            # Stub requester whose put() always answers 403 with body "tururu".
            def put(self, *args, **kwargs):
                return namedtuple("response", "status_code content")(403, "tururu")

        out = TestBufferConanOutput()
        auth = namedtuple("auth", "token")(None)
        uploader = Uploader(MockRequester(), out, verify=False)
        f = tempfile.mktemp()
        save(f, "some contents")
        with six.assertRaisesRegex(self, AuthenticationException, "tururu"):
            uploader.upload("fake_url", f, auth=auth)

    def test_403_raises_forbidden_exception_if_token(self):
        """A 403 despite a present token means the action is forbidden."""
        class MockRequester(object):
            # Stub requester whose put() always answers 403 with body "tururu".
            def put(self, *args, **kwargs):
                return namedtuple("response", "status_code content")(403, "tururu")

        out = TestBufferConanOutput()
        auth = namedtuple("auth", "token")("SOMETOKEN")
        uploader = Uploader(MockRequester(), out, verify=False)
        f = tempfile.mktemp()
        save(f, "some contents")
        with six.assertRaisesRegex(self, ForbiddenException, "tururu"):
            uploader.upload("fake_url", f, auth=auth)
| 34.689655 | 83 | 0.668986 | 207 | 2,012 | 6.386473 | 0.299517 | 0.030257 | 0.049924 | 0.06354 | 0.638427 | 0.638427 | 0.638427 | 0.638427 | 0.605144 | 0.605144 | 0 | 0.011494 | 0.22167 | 2,012 | 57 | 84 | 35.298246 | 0.832695 | 0 | 0 | 0.585366 | 0 | 0 | 0.102883 | 0 | 0 | 0 | 0 | 0 | 0.073171 | 1 | 0.146341 | false | 0 | 0.195122 | 0.073171 | 0.512195 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
736437c027c26b5ac4c9fcc937ef2adee38f97b2 | 2,405 | py | Python | src/moddem/modealcard.py | chainjazz/moddem | 1af8083923f0a0def5fb8468d9ea860f56beed8c | [
"BSD-2-Clause"
] | null | null | null | src/moddem/modealcard.py | chainjazz/moddem | 1af8083923f0a0def5fb8468d9ea860f56beed8c | [
"BSD-2-Clause"
] | null | null | null | src/moddem/modealcard.py | chainjazz/moddem | 1af8083923f0a0def5fb8468d9ea860f56beed8c | [
"BSD-2-Clause"
] | 1 | 2021-11-04T18:12:49.000Z | 2021-11-04T18:12:49.000Z | '''
Created on 11 Apr 2020
@author: dkr85djo
'''
class md:
    """Module-level game constants."""
    # NOTE(review): presumably the total number of cards in a full draw
    # pile — confirm against the game rules.
    MAXCARDS = 106
class MDColourSet:
    """A named set of colours sharing one monetary value.

    ``hexcolour`` is the display colour and defaults to black.
    """

    def __init__(self, value, names, hexcolour=b'#000000'):
        self.value, self.names, self.hexcolour = value, names, hexcolour

    def size(self):
        """Return the number of names in this colour set."""
        return len(self.names)
class MDCard:  # ABSTRACT
    """Base card: monetary value, title index, card id and card type.

    ``cardid`` is normally assigned later by the drawpile generator and
    ``cardtype`` is normally set by the subclass constructors.
    """

    def __init__(self, value, titleid, cardid=0, ctype=0):
        # value may be an index or a literal amount; titleid indexes the
        # card's descriptive text.
        (self.value, self.title, self.cardid, self.cardtype) = (
            value, titleid, cardid, ctype)
class MDMoneyCard(MDCard):
    """A money card (card type 4 by default) with an optional colour set."""

    def __init__(self, value, titleid, inctype=4, colourset=None):
        super().__init__(value, titleid, ctype=inctype)
        self.cardtype = inctype  # kept: mirrors the base-class assignment
        self.colourset = colourset
class MDPropertyCard(MDCard):
    # Property card: cardtype ends up as inctype for a single-colour card
    # and inctype + 1 for a wild (multi-colour) card.
    def __init__(self, value, titleid, colourset=None, inctype=0):
        MDCard.__init__(self, value, titleid, ctype=inctype)
        self.colourset = colourset # if non wild card, 1 element
        # NOTE(review): despite the None default, colourset must be a sized
        # sequence here — len(None) raises TypeError. Callers appear to
        # always pass one; confirm before relying on the default.
        self.cardtype = inctype + min(len(colourset) - 1, 1)
# targetall = True  => whichtarget=all
# targetall = False => whichtarget=$var (one of which is 'self')
class MDActionCard(MDCard):
    # Action card: like MDPropertyCard, cardtype is inctype for a
    # single-colour set and inctype + 1 for a multi-colour (wild) set.
    def __init__(self, value, titleid, colourset=None, targetall=False, inctype=2):
        MDCard.__init__(self, value, titleid, ctype=inctype)
        self.targetall = targetall
        self.colourset = colourset
        # NOTE(review): colourset=None would raise TypeError in len();
        # callers appear to always pass a sequence — confirm.
        self.cardtype = inctype + min(len(colourset) - 1, 1)
class MDCardCollection:
    """An ordered pile of cards belonging to one owner.

    ``ispublic`` marks whether other players may see the contents.
    """

    def __init__(self, ownerid, ispublic=False):
        """Create an empty collection for the given owner."""
        self.ownerid = ownerid
        self.ispublic = ispublic
        self.cards = []

    def generate(self, cardset):
        """Fill the collection from *cardset*, assigning sequential ids.

        The collection is agnostic to the concrete card type; it only
        stamps ``cardid`` on each object in order.
        """
        for cardid, card in enumerate(cardset):
            card.cardid = cardid  # id assignment is owned by the generator
            self.cards.append(card)

    def __len__(self):
        """Number of cards held (resolves the old FIXME about len())."""
        return len(self.cards)

    def length(self):
        """Backward-compatible alias for ``len(self)``."""
        return len(self.cards)

    def remove(self, index):
        """Remove and return the card at *index* (raises IndexError if bad)."""
        return self.cards.pop(index)

    def add(self, cardobject):
        """Append *cardobject* to the end of the collection."""
        self.cards.append(cardobject)
| 32.5 | 93 | 0.618295 | 275 | 2,405 | 5.276364 | 0.370909 | 0.062026 | 0.071675 | 0.096485 | 0.230186 | 0.214335 | 0.194349 | 0.194349 | 0 | 0 | 0 | 0.015707 | 0.285239 | 2,405 | 73 | 94 | 32.945205 | 0.828389 | 0.191684 | 0 | 0.212766 | 0 | 0 | 0.003644 | 0 | 0 | 0 | 0 | 0.013699 | 0 | 1 | 0.234043 | false | 0 | 0 | 0.06383 | 0.468085 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
b4035037d4657ef3fa986a433de8a3332299eea4 | 249 | py | Python | crmapp/contacts/urls.py | byNio/crmeasy | 7d69ccc6741a0124c1dcbd45389b2b8bb7bae722 | [
"MIT"
] | 9 | 2015-01-09T08:52:12.000Z | 2018-08-03T06:07:19.000Z | crmapp/contacts/urls.py | byNio/crmeasy | 7d69ccc6741a0124c1dcbd45389b2b8bb7bae722 | [
"MIT"
] | 2 | 2015-01-14T10:56:02.000Z | 2020-08-18T22:28:18.000Z | crmapp/contacts/urls.py | byNio/crmeasy | 7d69ccc6741a0124c1dcbd45389b2b8bb7bae722 | [
"MIT"
] | 103 | 2015-07-01T11:12:17.000Z | 2022-01-29T19:18:47.000Z | from django.conf.urls import patterns, url
# URL patterns for the contacts app (old-style Django 1.x string-view
# patterns; presumably included under a per-contact URL prefix — confirm
# against the project urlconf).
contact_urls = patterns('',
    # detail page for a single contact
    url(r'^$', 'crmapp.contacts.views.contact_detail', name="contact_detail"),
    # create/update form for the contact
    url(r'^edit/$',
        'crmapp.contacts.views.contact_cru', name='contact_update'
    ),
)
| 22.636364 | 78 | 0.666667 | 31 | 249 | 5.193548 | 0.548387 | 0.136646 | 0.236025 | 0.322981 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.156627 | 249 | 10 | 79 | 24.9 | 0.766667 | 0 | 0 | 0 | 0 | 0 | 0.425703 | 0.277108 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.142857 | 0 | 0.142857 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
b40f90ad789af8f55939665f3bc3946c4f1f17ad | 947 | py | Python | packages/pyre/patterns/Accumulator.py | rtburns-jpl/pyre | ffc4fc1b2936e355f709d084eb4055954960b3a2 | [
"BSD-3-Clause"
] | null | null | null | packages/pyre/patterns/Accumulator.py | rtburns-jpl/pyre | ffc4fc1b2936e355f709d084eb4055954960b3a2 | [
"BSD-3-Clause"
] | 1 | 2021-06-10T23:42:13.000Z | 2021-06-10T23:42:13.000Z | packages/pyre/patterns/Accumulator.py | jlmaurer/pyre | 6af38a83621d7d6228d147b4bb94f97fbb10f6e2 | [
"BSD-3-Clause"
] | 2 | 2020-08-31T18:07:52.000Z | 2021-12-10T08:54:39.000Z | # -*- coding: utf-8 -*-
#
# michael a.g. aïvázis
# orthologue
# (c) 1998-2020 all rights reserved
#
# base class
from .CoFunctor import CoFunctor
# class declaration
class Accumulator(CoFunctor):
    """
    A coroutine that accumulates every item sent to it in a list

    The collected items are available through the 'cache' attribute
    """
    # interface
    def throw(self, errorTp, error=None, traceback=None):
        """
        Handle exceptions by discarding them
        """
        # accumulators ignore errors; NOTE(review): presumably this overrides
        # a CoFunctor behavior that would inject the exception into the
        # generator — confirm against CoFunctor
        return
    # meta-methods
    def __init__(self, **kwds):
        # initialize my cache that will hold the accumulated items
        self.cache = []
        # chain up
        super().__init__(**kwds)
        # all done
        return
    # my coroutine
    def __call__(self):
        """
        Store everything that comes in
        """
        # for ever
        while True:
            # get the next item sent by the client
            item = yield
            # store it
            self.cache.append(item)
        # all done; unreachable since the loop never exits, kept for style
        return
# end of file
| 17.537037 | 57 | 0.533263 | 98 | 947 | 5.030612 | 0.704082 | 0.036511 | 0.052738 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.015126 | 0.3717 | 947 | 53 | 58 | 17.867925 | 0.813445 | 0.388596 | 0 | 0.230769 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.230769 | false | 0 | 0.076923 | 0 | 0.615385 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
b41115adb98f5f1d3092bef70e248669408bc1eb | 831 | py | Python | config.py | bgildson/evolux-challenge | 77a41a987e3277221f650c9f1392aa240263a1fa | [
"MIT"
] | null | null | null | config.py | bgildson/evolux-challenge | 77a41a987e3277221f650c9f1392aa240263a1fa | [
"MIT"
] | null | null | null | config.py | bgildson/evolux-challenge | 77a41a987e3277221f650c9f1392aa240263a1fa | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import os
class BaseConfig:
    """Default Flask settings shared by every environment."""

    DEBUG = False
    TESTING = False

    # Filesystem layout: the SQLite file sits beside the project root.
    PROJECT_ROOT = os.path.abspath('.')
    SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(PROJECT_ROOT, "db.sqlite")
    SQLALCHEMY_TRACK_MODIFICATIONS = False

    # JWT defaults; the secret is regenerated on every process start.
    JWT_SECRET_KEY = os.urandom(32)
    JWT_IDENTITY_CLAIM = 'sub'
    JWT_ERROR_MESSAGE_KEY = 'error'
    JWT_HEADER_TYPE = 'JWT'
class DevelopmentConfig(BaseConfig):
    """Local development overrides: debug on, Postgres, fixed secret."""
    DEBUG = True
    # Host 'db' is presumably a docker-compose service — confirm.
    SQLALCHEMY_DATABASE_URI = 'postgres://postgres:postgres@db:5432/evolux_challenge'
    JWT_SECRET_KEY = 'mysupersecretkey'
class TestingConfig(BaseConfig):
    """Test-suite overrides: in-memory SQLite, testing flag on."""
    TESTING = True
    SQLALCHEMY_DATABASE_URI = 'sqlite://'
class ProductionConfig(BaseConfig):
    """Production overrides: everything secret comes from the environment."""
    # Both evaluate to None if the environment variable is missing.
    SQLALCHEMY_DATABASE_URI = os.getenv('SQLALCHEMY_DATABASE_URI')
    JWT_SECRET_KEY = os.getenv('JWT_SECRET_KEY')
| 21.307692 | 85 | 0.714801 | 98 | 831 | 5.755102 | 0.459184 | 0.159574 | 0.18617 | 0.049645 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010189 | 0.173285 | 831 | 38 | 86 | 21.868421 | 0.810771 | 0.025271 | 0 | 0 | 0 | 0 | 0.220297 | 0.139851 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.047619 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
b414f69d756964cdfde2ddf66d76e932d095ecab | 924 | py | Python | mirari/TCS/migrations/0032_auto_20190308_1454.py | gcastellan0s/mirariapp | 24a9db06d10f96c894d817ef7ccfeec2a25788b7 | [
"MIT"
] | null | null | null | mirari/TCS/migrations/0032_auto_20190308_1454.py | gcastellan0s/mirariapp | 24a9db06d10f96c894d817ef7ccfeec2a25788b7 | [
"MIT"
] | 18 | 2019-12-27T19:58:20.000Z | 2022-02-27T08:17:49.000Z | mirari/TCS/migrations/0032_auto_20190308_1454.py | gcastellan0s/mirariapp | 24a9db06d10f96c894d817ef7ccfeec2a25788b7 | [
"MIT"
] | null | null | null | # Generated by Django 2.0.5 on 2019-03-08 20:54
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add four optional name columns to the OrderService model.

    NOTE(review): the fields look like denormalized copies of related
    objects' names (brand/company/model/store) — confirm against the model.
    """

    dependencies = [
        ('TCS', '0031_auto_20190228_1417'),
    ]

    operations = [
        # All four are optional text columns (blank=True, max 250 chars).
        migrations.AddField(
            model_name='orderservice',
            name='brandName',
            field=models.CharField(blank=True, max_length=250),
        ),
        migrations.AddField(
            model_name='orderservice',
            name='companyName',
            field=models.CharField(blank=True, max_length=250),
        ),
        migrations.AddField(
            model_name='orderservice',
            name='modeloName',
            field=models.CharField(blank=True, max_length=250),
        ),
        migrations.AddField(
            model_name='orderservice',
            name='storeName',
            field=models.CharField(blank=True, max_length=250),
        ),
    ]
| 27.176471 | 63 | 0.575758 | 89 | 924 | 5.853933 | 0.460674 | 0.138196 | 0.176583 | 0.207294 | 0.644914 | 0.644914 | 0.56238 | 0.56238 | 0.483685 | 0.483685 | 0 | 0.067398 | 0.309524 | 924 | 33 | 64 | 28 | 0.749216 | 0.048701 | 0 | 0.592593 | 1 | 0 | 0.128848 | 0.026226 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.037037 | 0 | 0.148148 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
b41efc41c00a6fd6ba0670461dff9ba6a5674030 | 2,906 | py | Python | src/relstorage/blobhelper/__init__.py | enfold/relstorage | 9fcd526b537cb6537cc2ae33154b63096550f210 | [
"ZPL-2.1"
] | 40 | 2015-10-08T05:35:13.000Z | 2022-03-28T23:50:06.000Z | src/relstorage/blobhelper/__init__.py | enfold/relstorage | 9fcd526b537cb6537cc2ae33154b63096550f210 | [
"ZPL-2.1"
] | 364 | 2015-03-23T15:25:42.000Z | 2022-03-17T08:41:34.000Z | src/relstorage/blobhelper/__init__.py | enfold/relstorage | 9fcd526b537cb6537cc2ae33154b63096550f210 | [
"ZPL-2.1"
] | 33 | 2015-06-08T23:03:22.000Z | 2022-03-21T08:25:53.000Z | ##############################################################################
#
# Copyright (c) 2009,2019 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Blob management utilities needed by RelStorage.
Most of this code is lifted from ZODB/ZEO.
"""
from __future__ import absolute_import
from __future__ import print_function
from ZODB.POSException import Unsupported
from zope.interface import implementer
from .interfaces import INoBlobHelper
__all__ = [
'BlobHelper',
]
@implementer(INoBlobHelper)
class NoBlobHelper(object):
    """Null-object blob helper used when no blob directory is configured.

    Transaction-lifecycle methods are no-ops; any attempt to actually load
    or store a blob raises ``Unsupported``.
    """
    # pylint:disable=unused-argument
    __slots__ = ()
    # No database locks are ever needed: there is nothing to commit.
    NEEDS_DB_LOCK_TO_FINISH = False
    NEEDS_DB_LOCK_TO_VOTE = False
    shared_blob_helper = False
    txn_has_blobs = False
    shared_blob_dir = None
    def new_instance(self, adapter):
        # Stateless, so every storage instance can share this one object.
        return self
    # Transaction lifecycle hooks: all no-ops returning None.
    vote = finish = lambda self, _tid=None: None
    begin = abort = clear_temp = close = lambda self: None
    def after_pack(self, oid_int, tid_int):
        """
        Because there cannot be blobs, this method has nothing to do.
        """
    def copy_undone(self, copied, tid):
        """
        Because there cannot be blobs, this method has nothing to do.
        """
    def loadBlob(self, cursor, oid, serial):
        # Reads and writes of blob data are impossible without a blob dir.
        raise Unsupported("No blob directory is configured.")
    def openCommittedBlobFile(self, cursor, oid, serial, blob=None):
        raise Unsupported("No blob directory is configured.")
    def temporaryDirectory(self):
        raise Unsupported("No blob directory is configured.")
    def storeBlob(self, cursor, store_func,
                  oid, serial, data, blobfilename, version, txn):
        raise Unsupported("No blob directory is configured.")
    def restoreBlob(self, cursor, oid, serial, blobfilename):
        raise Unsupported("No blob directory is configured.")
    @property
    def fshelper(self):
        # Deliberately an AttributeError so hasattr() checks come out False.
        raise AttributeError("NoBlobHelper has no 'fshelper'")
    def __repr__(self):
        return "<NoBlobHelper>"
def BlobHelper(options, adapter):
    """Factory: choose the blob-helper implementation matching *options*.

    Returns a NoBlobHelper when no blob directory is configured, a
    SharedBlobHelper when ``options.shared_blob_dir`` is set, and a
    CacheBlobHelper otherwise.
    """
    if options is None or not options.blob_dir:
        return NoBlobHelper()
    # Prevent warnings from runpy that these were found in sys.modules
    # before executing .cached.
    from .cached import CacheBlobHelper
    from .shared import SharedBlobHelper
    if options.shared_blob_dir:
        return SharedBlobHelper(options, adapter)
    return CacheBlobHelper(options, adapter)
| 29.653061 | 78 | 0.667928 | 340 | 2,906 | 5.570588 | 0.464706 | 0.042239 | 0.047518 | 0.058078 | 0.174762 | 0.174762 | 0.174762 | 0.152059 | 0.05491 | 0.05491 | 0 | 0.00434 | 0.207158 | 2,906 | 97 | 79 | 29.958763 | 0.817708 | 0.274948 | 0 | 0.108696 | 0 | 0 | 0.113528 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.23913 | false | 0 | 0.152174 | 0.043478 | 0.782609 | 0.021739 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
b4296ccd3d69eca20bf7407f22dd4d811aa49e42 | 491 | py | Python | flask-api/forms.py | Drprincerajput/flask-adoption-api | 00d550ef0067182d3fab53ddbf6b34b8efee9ce4 | [
"MIT"
] | 1 | 2019-12-29T10:27:23.000Z | 2019-12-29T10:27:23.000Z | flask-api/forms.py | Drprincerajput/flask-adoption-api | 00d550ef0067182d3fab53ddbf6b34b8efee9ce4 | [
"MIT"
] | null | null | null | flask-api/forms.py | Drprincerajput/flask-adoption-api | 00d550ef0067182d3fab53ddbf6b34b8efee9ce4 | [
"MIT"
] | null | null | null | from flask_wtf import FlaskForm
from wtforms import StringField, IntegerField, SubmitField
class AddForm(FlaskForm):
    """Form for registering a new cat by name."""
    name = StringField('Name of Cat: ')
    submit = SubmitField('Add Cat')
class AdOwnerForm(FlaskForm):
    """Form for adding an owner linked to an existing cat by id."""
    name = StringField('Name of Owner:')
    cat_id = IntegerField("Id of Cats: ")
    submit = SubmitField("Add Owner")
class DelForm(FlaskForm):
    """Form for removing a cat by its id number."""
    id = IntegerField('Id Number of Cat to Remove: ')
    submit = SubmitField('Remove Cat')
| 23.380952 | 59 | 0.674134 | 56 | 491 | 5.875 | 0.428571 | 0.155015 | 0.145897 | 0.170213 | 0.182371 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.224033 | 491 | 20 | 60 | 24.55 | 0.863517 | 0 | 0 | 0 | 0 | 0 | 0.197452 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.166667 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
b429b23fe717760a00d8c497e4391d3981e9b1d9 | 375 | py | Python | models.py | cockroachlabs/simple-crud-python-sqlalchemy | 30e07a144927057cd46f253d92f717b5c052aa9b | [
"Apache-2.0"
] | 1 | 2022-02-23T09:50:53.000Z | 2022-02-23T09:50:53.000Z | models.py | cockroachlabs/example-app-python-sqlalchemy | 30e07a144927057cd46f253d92f717b5c052aa9b | [
"Apache-2.0"
] | 1 | 2021-06-24T21:40:37.000Z | 2021-06-24T21:40:37.000Z | models.py | cockroachlabs/example-app-python-sqlalchemy | 30e07a144927057cd46f253d92f717b5c052aa9b | [
"Apache-2.0"
] | 1 | 2021-06-24T20:12:49.000Z | 2021-06-24T20:12:49.000Z | from sqlalchemy import Column, Integer
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import declarative_base
Base = declarative_base()
class Account(Base):
    """The Account class corresponds to the "accounts" database table.
    """
    __tablename__ = 'accounts'
    # Primary key, stored as a native UUID, surfaced as uuid.UUID objects.
    id = Column(UUID(as_uuid=True), primary_key=True)
    # Account balance; units (cents vs whole units) not specified here.
    balance = Column(Integer)
| 26.785714 | 70 | 0.749333 | 46 | 375 | 5.934783 | 0.565217 | 0.153846 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.162667 | 375 | 13 | 71 | 28.846154 | 0.869427 | 0.168 | 0 | 0 | 0 | 0 | 0.026578 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.375 | 0 | 0.875 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
b4342c3397da20edc28cd4d9b8bdbaab8ddd8eca | 1,182 | py | Python | conanfile.py | pss146/nanoidxx | 6f24a0bd97fa60b6f34198adedd4bc19168f6427 | [
"MIT"
] | null | null | null | conanfile.py | pss146/nanoidxx | 6f24a0bd97fa60b6f34198adedd4bc19168f6427 | [
"MIT"
] | null | null | null | conanfile.py | pss146/nanoidxx | 6f24a0bd97fa60b6f34198adedd4bc19168f6427 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from conans import ConanFile, tools, CMake
class NanoIdXXConan(ConanFile):
    """Conan recipe for nanoidxx, a nanoid ID generator port for C++."""
    name = "nanoidxx"
    version = "0.1"
    description = "A tiny, URL-friendly, unique string ID generator for C++, implementation of ai's nanoid!"
    url = "https://github.com/pss146/nanoidxx"
    homepage = "https://github.com/pss146/nanoidxx"
    author = "Stanislav Perepelitsyn <stas.perepel@gmail.com>"
    license = "MIT"
    settings = "os", "compiler", "build_type", "arch"
    generators = "cmake"

    def export_sources(self):
        """Ship the whole working tree as the recipe's sources."""
        self.copy("*") # -> copies all files/folders from working dir into a “source” directory

    def requirements(self):
        """Declare dependencies (Catch2, a testing framework)."""
        self.requires.add("catch2/2.13.7")

    def configure_cmake(self):
        """Create and configure a CMake helper, reused by build()."""
        cmake = CMake(self)
        cmake.configure()
        return cmake

    def build(self):
        """Configure, compile and run the test suite."""
        cmake = self.configure_cmake()
        cmake.build()
        cmake.test()

    def package(self):
        """Copy public headers into the package's include folder."""
        # Copy headers to the include folder and libraries to the lib folder
        self.copy("*.h", dst="include", src="include")

    def package_id(self):
        """Header-only library: package id is independent of settings."""
        self.info.header_only()
self.info.header_only() | 28.829268 | 108 | 0.628596 | 147 | 1,182 | 5.013605 | 0.646259 | 0.032564 | 0.037992 | 0.054274 | 0.075984 | 0 | 0 | 0 | 0 | 0 | 0 | 0.015573 | 0.239425 | 1,182 | 41 | 109 | 28.829268 | 0.804227 | 0.152284 | 0 | 0 | 0 | 0.035714 | 0.277277 | 0.024024 | 0 | 0 | 0 | 0 | 0 | 1 | 0.214286 | false | 0 | 0.071429 | 0 | 0.678571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
b43fa65af3e936e6ceea88f8b35d7f76923ddddc | 224 | py | Python | catkin_ws/src/nuice_gui/nuice_dc_motor_rqt/scripts/nuice_dc_motor_rqt.py | SpyGuyIan/NUice | 47991a848dac244b4c476b4a92f7a27a1f9e5dcc | [
"MIT"
] | 1 | 2021-08-17T00:40:42.000Z | 2021-08-17T00:40:42.000Z | catkin_ws/src/nuice_gui/nuice_dc_motor_rqt/scripts/nuice_dc_motor_rqt.py | SpyGuyIan/NUice | 47991a848dac244b4c476b4a92f7a27a1f9e5dcc | [
"MIT"
] | 1 | 2021-01-31T17:15:40.000Z | 2021-01-31T17:15:40.000Z | catkin_ws/src/nuice_gui/nuice_dc_motor_rqt/scripts/nuice_dc_motor_rqt.py | NUMarsIce/NUice | 47991a848dac244b4c476b4a92f7a27a1f9e5dcc | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import sys

# NOTE(review): DcMotorPlugin appears unused below; presumably imported for
# its side effects (rqt plugin registration) — confirm before removing.
from nuice_dc_motor_rqt.dc_motor_plugin import DcMotorPlugin
from rqt_gui.main import Main

# Launch the rqt GUI running this plugin standalone; exit with its status.
plugin = 'nuice_dc_motor_rqt'
main = Main(filename=plugin)
sys.exit(main.main(standalone=plugin))
b452346f8da664472d0050793f447134eed70924 | 687 | py | Python | example/simple_led.py | Griffin-Sullivan/Python-LED-Game | 0977daed9186328583b27fdb072550f8b1ae5460 | [
"MIT"
] | null | null | null | example/simple_led.py | Griffin-Sullivan/Python-LED-Game | 0977daed9186328583b27fdb072550f8b1ae5460 | [
"MIT"
] | null | null | null | example/simple_led.py | Griffin-Sullivan/Python-LED-Game | 0977daed9186328583b27fdb072550f8b1ae5460 | [
"MIT"
] | 2 | 2019-11-18T16:55:55.000Z | 2019-11-19T22:21:33.000Z | import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BOARD)
GPIO.setup(7, GPIO.OUT)
GPIO.setup(11, GPIO.OUT)
GPIO.setup(12,GPIO.OUT)
GPIO.setup(13,GPIO.OUT)
GPIO.setup(15,GPIO.OUT)
GPIO.setup(16,GPIO.OUT)
GPIO.setwarnings(False)
try:
for i in range(50):
GPIO.output(7,True)
GPIO.output(11,True)
GPIO.output(12,True)
GPIO.output(13,True)
GPIO.output(15,True)
GPIO.output(16,True)
time.sleep(1)
GPIO.output(7,False)
GPIO.output(11,False)
GPIO.output(12,False)
GPIO.output(13,False)
GPIO.output(15,False)
GPIO.output(16,False)
time.sleep(1)
finally:
GPIO.cleanup()
| 20.818182 | 29 | 0.620087 | 107 | 687 | 3.981308 | 0.271028 | 0.28169 | 0.15493 | 0.187793 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.070342 | 0.234352 | 687 | 32 | 30 | 21.46875 | 0.739544 | 0 | 0 | 0.071429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.071429 | 0 | 0.071429 | 0 | 0 | 0 | 0 | null | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
b453e003d7dbd85f2a6574e9a52f1ba71c10f9e6 | 795 | py | Python | web2py-appliances-master/SurveyAppFlourish/languages/it.py | wantsomechocolate/WantsomeBeanstalk | 8c8a0a80490d04ea52661a3114fd3db8de65a01e | [
"BSD-3-Clause"
] | null | null | null | web2py-appliances-master/SurveyAppFlourish/languages/it.py | wantsomechocolate/WantsomeBeanstalk | 8c8a0a80490d04ea52661a3114fd3db8de65a01e | [
"BSD-3-Clause"
] | null | null | null | web2py-appliances-master/SurveyAppFlourish/languages/it.py | wantsomechocolate/WantsomeBeanstalk | 8c8a0a80490d04ea52661a3114fd3db8de65a01e | [
"BSD-3-Clause"
] | null | null | null | {
'%Y-%m-%d':'%Y-%m-%d',
'%Y-%m-%d %H:%M:%S':'%Y-%m-%d %H:%M:%S',
'%s rows deleted':'%s records cancellati',
'%s rows updated':'*** %s records modificati',
'Hello World':'Salve Mondo',
'Invalid Query':'Query invalida',
'Sure you want to delete this object?':'Sicuro che vuoi cancellare questo oggetto?',
'Welcome to web2py':'Ciao da wek2py',
'click here for online examples':'clicca per vedere gli esempi',
'click here for the administrative interface':'clicca per l\'interfaccia administrativa',
'data uploaded':'dati caricati',
'db':'db',
'design':'progetta',
'done!':'fatto!',
'invalid request':'richiesta invalida!',
'new record inserted':'nuovo record inserito',
'record does not exist':'il record non esiste',
'state':'stato',
'unable to parse csv file':'non so leggere questo csv file'
}
| 36.136364 | 89 | 0.686792 | 119 | 795 | 4.588235 | 0.689076 | 0.014652 | 0.021978 | 0.014652 | 0.032967 | 0.032967 | 0 | 0 | 0 | 0 | 0 | 0.002837 | 0.113208 | 795 | 21 | 90 | 37.857143 | 0.771631 | 0 | 0 | 0 | 0 | 0 | 0.794969 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
b45f75d93d48955d072d1c282b2545f745ac1762 | 5,136 | py | Python | scripts/cv-tests/flash-recognition.py | ayushajain/Drone | c98d2bab686e5b9bdacc45aef5abe49dd45a6ce0 | [
"RSA-MD"
] | null | null | null | scripts/cv-tests/flash-recognition.py | ayushajain/Drone | c98d2bab686e5b9bdacc45aef5abe49dd45a6ce0 | [
"RSA-MD"
] | null | null | null | scripts/cv-tests/flash-recognition.py | ayushajain/Drone | c98d2bab686e5b9bdacc45aef5abe49dd45a6ce0 | [
"RSA-MD"
] | null | null | null | import argparse
import cv2
import numpy as np
from flash import Flash
ap = argparse.ArgumentParser()
# generate arguments
ap.add_argument("-i", "--video", required=False, default="images/drone-updown-recog.MP4", help="Path to the video to be processed")
ap.add_argument("-t", "--threshold", required=False, default=40, help="Threshold limit")
ap.add_argument("-s", "--scale", required=False, default=0.5, help="Image scale size")
ap.add_argument("-b", "--blur", required=False, default=3, help="Blur amount")
ap.add_argument("-r", "--rotate", required=False, default=0, help="Image rotation amount")
args = vars(ap.parse_args())
# bit pattern length
BIT_PATTERN_LENGTH = 8
# frame interval
FRAME_INTERVAL = 5
# kernel for blob dilation
# changing the dimensions affects size of dilation.
kernel = np.ones((5, 5), np.float32) / 25
# single flash
flashes = []
# last frame for cv2.absoluteValue
last_frame = None
# frame count
frame_count = 0
# TEST: flash identity
flash_identity = 0
# TEST: get pattern from user
PATTERN = "01010111"
# TODO: create a state function for gps, flash detection/recognition, moving towards flash, regaining flash location
def main():
# frame display/saving
cap = cv2.VideoCapture(args['video'])
ret, frame = cap.read()
print frame.tolist()
while True:
# Capture frame-by-frame
ret, frame = cap.read()
# end capture once video is over
if frame is None:
break
filtered = perform_filters(frame)
if len(filtered['ROIS']) > 0:
identify_flash(filtered['ROIS'])
draw_flashes(filtered['origFrame'])
# display frames to window
cv2.imshow('BINARY_FILTER', filtered['binaryThresh'])
cv2.imshow('ORIGINAL_FRAME', filtered['origFrame'])
# quit video on keypress(q)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# close window and camera
cap.release()
cv2.destroyAllWindows()
def perform_filters(image):
global PATTERN
global last_frame
global frame_count
# resize frame to reduce processing times
image = cv2.resize(image, (0, 0), fx=float(args["scale"]), fy=float(args["scale"]))
# Greyscale and Blurring to eliminate
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
mask = cv2.GaussianBlur(gray, (int(args["blur"]), int(args["blur"])), 0)
pixels = []
diff = None
if last_frame is not None:
diff = cv2.absdiff(mask, last_frame)
(t, diff) = cv2.threshold(diff, float(args["threshold"]), 255, cv2.THRESH_BINARY)
# Find contours on threshold frame
contours = cv2.findContours(diff.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
# iterate through all the contour shapes found
for c in contours[0]:
# grab information about contours
moments = cv2.moments(c)
center_x = 0
center_y = 0
try:
# determine center of contour
center_x = int((moments["m10"] / moments["m00"]))
center_y = int((moments["m01"] / moments["m00"]))
pixels.append({"location":(center_x, center_y), "value": gray[center_y][center_x]})
except ZeroDivisionError:
pass
# draw contours to frame with a circle at the center
cv2.circle(image, (center_x, center_y), 1, (0, 0, 255), -1)
# update last frame to current one
last_frame = mask
frame_count += 1
# return processed frames
return {'binaryThresh': image if diff is None else diff, 'origFrame': image, 'ROIS': pixels}
def draw_flashes(image):
# identify and flashes
for flash in flashes:
# print flash
cv2.putText(image, str(flash.identity), (flash.x, flash.y), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 0, 0), 2)
if flash.equals_pattern(PATTERN):
cv2.rectangle(image, (flash.x - 15, flash.y - 15), (flash.x + 15, flash.y + 15), (0, 255, 0), 2)
def identify_flash(regions_of_interest):
global flash_identity
# iterate through flash pixels found in current frame
for roi in regions_of_interest:
# determines whether a flash object has already been created for the current pixel
flash_exists = False
# iterate through the flashes we determined in previous frames
for flash in flashes:
# TODO: change distance to pixel based on drone altitude and implement object tracking
# mean-shift calculation here
if flash.distance_to(roi['location']) < 30:
flash_exists = True
# push bit to flash and update location
if flash.last_update != frame_count:
flash.last_update = frame_count
flash.push_raw_bits(roi['value'])
flash.update_location(roi['location'])
# define a flash object if one does not already exist
if not flash_exists:
flashes.append(Flash(roi['location'], str(flash_identity)))
flash_identity += 1
if __name__ == '__main__':
main()
| 30.571429 | 131 | 0.633956 | 666 | 5,136 | 4.783784 | 0.369369 | 0.019774 | 0.020402 | 0.013183 | 0.027307 | 0.027307 | 0 | 0 | 0 | 0 | 0 | 0.026433 | 0.256036 | 5,136 | 167 | 132 | 30.754491 | 0.80738 | 0.243575 | 0 | 0.070588 | 1 | 0 | 0.095349 | 0.007534 | 0 | 0 | 0.001039 | 0.005988 | 0 | 0 | null | null | 0.011765 | 0.047059 | null | null | 0.011765 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
b46b87c01c6703718c7a751df09ef18c15ead70d | 291 | py | Python | apps/love/tests/models.py | gannetson/sportschooldeopenlucht | 0c78e5a95b22a963244112e478119ba60c572141 | [
"BSD-3-Clause"
] | 1 | 2019-01-19T06:58:39.000Z | 2019-01-19T06:58:39.000Z | apps/love/tests/models.py | gannetson/sportschooldeopenlucht | 0c78e5a95b22a963244112e478119ba60c572141 | [
"BSD-3-Clause"
] | null | null | null | apps/love/tests/models.py | gannetson/sportschooldeopenlucht | 0c78e5a95b22a963244112e478119ba60c572141 | [
"BSD-3-Clause"
] | null | null | null | from django.db import models
from apps.love.models import LovableModel
class TestBlogPost(LovableModel):
"""
A model that is used for testing.
"""
title = models.CharField(max_length=200)
slug = models.SlugField()
def __unicode__(self):
return self.title
| 19.4 | 44 | 0.687285 | 36 | 291 | 5.416667 | 0.777778 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.013274 | 0.223368 | 291 | 14 | 45 | 20.785714 | 0.849558 | 0.113402 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0.285714 | 0.142857 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 2 |
b46d08c0cc9314db484e0dda85923b82ece1b620 | 83 | py | Python | DrawShape/draw_sun.py | Yoshiyuki-Su/python_samples | 79374a23ab20098ba557bd7c5424400035175af7 | [
"MIT"
] | null | null | null | DrawShape/draw_sun.py | Yoshiyuki-Su/python_samples | 79374a23ab20098ba557bd7c5424400035175af7 | [
"MIT"
] | 6 | 2020-11-14T19:10:36.000Z | 2022-03-13T04:06:01.000Z | DrawShape/draw_sun.py | Yoshiyuki-Su/python_samples | 79374a23ab20098ba557bd7c5424400035175af7 | [
"MIT"
] | null | null | null | #太陽を描画
from turtle import *
for _ in range(36):
fd(250)
left(170)
input()
| 10.375 | 20 | 0.614458 | 13 | 83 | 3.846154 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.129032 | 0.253012 | 83 | 7 | 21 | 11.857143 | 0.677419 | 0.060241 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 0.2 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
b47fd5f9bc55e5155fd683076d80f2df62cb65b5 | 623 | py | Python | nxt/test/__init__.py | Mikfr83/nxt | 6e87bae5b17085cd5fdfbaa3ebc0d50fac00b613 | [
"MIT"
] | 53 | 2020-12-08T05:04:05.000Z | 2022-03-07T03:39:58.000Z | nxt/test/__init__.py | Mikfr83/nxt | 6e87bae5b17085cd5fdfbaa3ebc0d50fac00b613 | [
"MIT"
] | 30 | 2020-12-07T21:43:47.000Z | 2021-12-13T02:25:46.000Z | nxt/test/__init__.py | Mikfr83/nxt | 6e87bae5b17085cd5fdfbaa3ebc0d50fac00b613 | [
"MIT"
] | 9 | 2020-12-08T08:07:07.000Z | 2021-04-16T22:31:44.000Z | # Builtin
import os
import inspect
# NOTE: the behavior of inspect.currentframe() likely requires cpython.
# https://docs.python.org/2.7/library/inspect.html#inspect.currentframe
_this_file = inspect.getframeinfo(inspect.currentframe()).filename
TEST_DIR = os.path.dirname(os.path.abspath(_this_file))
def get_test_file_path(file_name):
"""Shortcut to get full path to desired test file.
Only assembles path, no validation.
:param file_name: Filename of testing file.
:type file_name: str
:return: Full path for given file name.
:rtype: str
"""
return os.path.join(TEST_DIR, file_name)
| 28.318182 | 71 | 0.739968 | 91 | 623 | 4.923077 | 0.538462 | 0.089286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00381 | 0.157303 | 623 | 21 | 72 | 29.666667 | 0.849524 | 0.561798 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.333333 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
b48bef871750d0c2b3541932bc1d03f4c3f4cca2 | 8,398 | py | Python | toontown/coghq/boardbothq/DistributedBoardOfficeRoom.py | LittleNed/toontown-stride | 1252a8f9a8816c1810106006d09c8bdfe6ad1e57 | [
"Apache-2.0"
] | 1 | 2018-06-16T23:06:38.000Z | 2018-06-16T23:06:38.000Z | toontown/coghq/boardbothq/DistributedBoardOfficeRoom.py | NoraTT/Historical-Commits-Project-Altis-Source | fe88e6d07edf418f7de6ad5b3d9ecb3d0d285179 | [
"Apache-2.0"
] | null | null | null | toontown/coghq/boardbothq/DistributedBoardOfficeRoom.py | NoraTT/Historical-Commits-Project-Altis-Source | fe88e6d07edf418f7de6ad5b3d9ecb3d0d285179 | [
"Apache-2.0"
] | 4 | 2019-06-20T23:45:23.000Z | 2020-10-14T20:30:15.000Z | from direct.directnotify import DirectNotifyGlobal
from direct.distributed.ClockDelta import *
from direct.interval.IntervalGlobal import *
from pandac.PandaModules import *
import random
from toontown.coghq import FactoryEntityCreator
from toontown.coghq.boardbothq import BoardOfficeRoomBase, BoardOfficeRoom
from toontown.coghq.boardbothq import BoardOfficeRoomSpecs
from otp.level import DistributedLevel
from otp.level import LevelSpec, LevelConstants
from toontown.nametag.NametagGlobals import *
from toontown.toonbase import TTLocalizer
from toontown.toonbase.ToontownGlobals import *
from toontown.chat.ChatGlobals import CFThought, CFTimeout
if __dev__:
from otp.level import EditorGlobals
def getBoardOfficeRoomReadyPostName(doId):
return 'boardofficeRoomReady-%s' % doId
class DistributedBoardOfficeRoom(DistributedLevel.DistributedLevel, BoardOfficeRoomBase.BoardOfficeRoomBase, BoardOfficeRoom.BoardOfficeRoom):
notify = DirectNotifyGlobal.directNotify.newCategory('DistributedBoardOfficeRoom')
EmulateEntrancePoint = False
def __init__(self, cr):
DistributedLevel.DistributedLevel.__init__(self, cr)
BoardOfficeRoomBase.BoardOfficeRoomBase.__init__(self)
BoardOfficeRoom.BoardOfficeRoom.__init__(self)
self.suitIds = []
self.suits = []
self.reserveSuits = []
self.joiningReserves = []
self.suitsInitialized = 0
self.goonClipPlanes = {}
self.boardoffice = None
def createEntityCreator(self):
return FactoryEntityCreator.FactoryEntityCreator(level=self)
def generate(self):
self.notify.debug('generate')
DistributedLevel.DistributedLevel.generate(self)
def delete(self):
del self.boardoffice
DistributedLevel.DistributedLevel.delete(self)
BoardOfficeRoom.BoardOfficeRoom.delete(self)
self.ignoreAll()
def setBoardOfficeId(self, boardofficeId):
self.notify.debug('boardofficeId: %s' % boardofficeId)
BoardOfficeRoomBase.BoardOfficeRoomBase.setBoardOfficeId(self, boardofficeId)
def setRoomId(self, roomId):
self.notify.debug('roomId: %s' % roomId)
BoardOfficeRoomBase.BoardOfficeRoomBase.setRoomId(self, roomId)
def setRoomNum(self, num):
self.notify.debug('roomNum: %s' % num)
BoardOfficeRoom.BoardOfficeRoom.setRoomNum(self, num)
def levelAnnounceGenerate(self):
self.notify.debug('levelAnnounceGenerate')
DistributedLevel.DistributedLevel.levelAnnounceGenerate(self)
specModule = BoardOfficeRoomSpecs.getBoardOfficeRoomSpecModule(self.roomId)
roomSpec = LevelSpec.LevelSpec(specModule)
if __dev__:
typeReg = self.getEntityTypeReg()
roomSpec.setEntityTypeReg(typeReg)
DistributedLevel.DistributedLevel.initializeLevel(self, roomSpec)
def getReadyPostName(self):
return getBoardOfficeRoomReadyPostName(self.doId)
def privGotSpec(self, levelSpec):
if __dev__:
if not levelSpec.hasEntityTypeReg():
typeReg = self.getEntityTypeReg()
levelSpec.setEntityTypeReg(typeReg)
DistributedLevel.DistributedLevel.privGotSpec(self, levelSpec)
BoardOfficeRoom.BoardOfficeRoom.enter(self)
self.acceptOnce('leavingBoardOffice', self.announceLeaving)
bboard.post(self.getReadyPostName())
def fixupLevelModel(self):
BoardOfficeRoom.BoardOfficeRoom.setGeom(self, self.geom)
BoardOfficeRoom.BoardOfficeRoom.initFloorCollisions(self)
def setBoardOffice(self, boardoffice):
self.boardoffice = boardoffice
def setBossConfronted(self, avId):
self.boardoffice.setBossConfronted(avId)
def setDefeated(self):
self.notify.info('setDefeated')
from toontown.coghq.boardbothq import DistributedBoardOffice
messenger.send(DistributedBoardOffice.DistributedBoardOffice.WinEvent)
def initVisibility(self, *args, **kw):
pass
def shutdownVisibility(self, *args, **kw):
pass
def lockVisibility(self, *args, **kw):
pass
def unlockVisibility(self, *args, **kw):
pass
def enterZone(self, *args, **kw):
pass
def updateVisibility(self, *args, **kw):
pass
def setVisibility(self, *args, **kw):
pass
def resetVisibility(self, *args, **kw):
pass
def handleVisChange(self, *args, **kw):
pass
def forceSetZoneThisFrame(self, *args, **kw):
pass
def getParentTokenForEntity(self, entId):
if __dev__:
pass
return 1000000 * self.roomNum + entId
def enterLtNotPresent(self):
BoardOfficeRoom.BoardOfficeRoom.enterLtNotPresent(self)
if __dev__:
bboard.removeIfEqual(EditorGlobals.EditTargetPostName, self)
self.ignore('f2')
def enterLtPresent(self):
BoardOfficeRoom.BoardOfficeRoom.enterLtPresent(self)
if __dev__:
bboard.post(EditorGlobals.EditTargetPostName, self)
if self.boardoffice is not None:
self.boardoffice.currentRoomName = BoardOfficeRoomSpecs.BoardOfficeRoomId2RoomName[self.roomId]
def printPos(self = self):
thisZone = self.getZoneNode(LevelConstants.UberZoneEntId)
pos = base.localAvatar.getPos(thisZone)
h = base.localAvatar.getH(thisZone)
roomName = BoardOfficeRoomSpecs.BoardOfficeRoomId2RoomName[self.roomId]
print 'boardoffice pos: %s, h: %s, room: %s' % (repr(pos), h, roomName)
if self.boardoffice is not None:
floorNum = self.boardoffice.floorNum
else:
floorNum = '???'
posStr = 'X: %.3f' % pos[0] + '\nY: %.3f' % pos[1] + '\nZ: %.3f' % pos[2] + '\nH: %.3f' % h + '\nboardofficeId: %s' % self.boardofficeId + '\nfloor: %s' % floorNum + '\nroomId: %s' % self.roomId + '\nroomName: %s' % roomName
base.localAvatar.setChatAbsolute(posStr, CFThought | CFTimeout)
return
self.accept('f2', printPos)
return
def handleSOSPanel(self, panel):
avIds = []
for avId in self.avIdList:
if base.cr.doId2do.get(avId):
avIds.append(avId)
panel.setFactoryToonIdList(avIds)
def disable(self):
self.notify.debug('disable')
BoardOfficeRoom.BoardOfficeRoom.exit(self)
if hasattr(self, 'suits'):
del self.suits
if hasattr(self, 'relatedObjectMgrRequest') and self.relatedObjectMgrRequest:
self.cr.relatedObjectMgr.abortRequest(self.relatedObjectMgrRequest)
del self.relatedObjectMgrRequest
bboard.remove(self.getReadyPostName())
DistributedLevel.DistributedLevel.disable(self)
def setSuits(self, suitIds, reserveSuitIds):
oldSuitIds = list(self.suitIds)
self.suitIds = suitIds
self.reserveSuitIds = reserveSuitIds
def reservesJoining(self):
pass
def getCogSpec(self, cogId):
cogSpecModule = BoardOfficeRoomSpecs.getCogSpecModule(self.roomId)
return cogSpecModule.CogData[cogId]
def getReserveCogSpec(self, cogId):
cogSpecModule = BoardOfficeRoomSpecs.getCogSpecModule(self.roomId)
return cogSpecModule.ReserveCogData[cogId]
def getBattleCellSpec(self, battleCellId):
cogSpecModule = BoardOfficeRoomSpecs.getCogSpecModule(self.roomId)
return cogSpecModule.BattleCells[battleCellId]
def getFloorOuchLevel(self):
return 8
def getTaskZoneId(self):
return self.boardofficeId
def getBossTaunt(self):
return TTLocalizer.BoardOfficeBossTaunt
def getBossBattleTaunt(self):
return TTLocalizer.BoardOfficeBossBattleTaunt
def __str__(self):
if hasattr(self, 'roomId'):
return '%s %s: %s' % (self.__class__.__name__, self.roomId, BoardOfficeRoomSpecs.BoardOfficeRoomId2RoomName[self.roomId])
else:
return 'DistributedBoardOfficeRoom'
def __repr__(self):
return str(self)
def reportModelSpecSyncError(self, msg): #we need this cause the unit spec and model Num do match to see what i mean un hash next line
self.notify.info('%s\n\nyour spec does not match the level model\nuse SpecUtil.updateSpec, then restart your AI and client' % msg)
pass | 36.833333 | 236 | 0.690641 | 763 | 8,398 | 7.528178 | 0.305374 | 0.020891 | 0.017409 | 0.024373 | 0.099756 | 0.052925 | 0.043872 | 0.030292 | 0.030292 | 0 | 0 | 0.003358 | 0.219933 | 8,398 | 228 | 237 | 36.833333 | 0.873454 | 0.010955 | 0 | 0.165746 | 0 | 0.005525 | 0.055141 | 0.014327 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.071823 | 0.088398 | null | null | 0.016575 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
81fdda20741f338cbaf54e1c4faa16604073358a | 252 | py | Python | productdb/app/commands/__init__.py | tspycher/python-productdb | 17970a681b32eb249b78fab7dbeaee9d63ca7c05 | [
"MIT"
] | null | null | null | productdb/app/commands/__init__.py | tspycher/python-productdb | 17970a681b32eb249b78fab7dbeaee9d63ca7c05 | [
"MIT"
] | null | null | null | productdb/app/commands/__init__.py | tspycher/python-productdb | 17970a681b32eb249b78fab7dbeaee9d63ca7c05 | [
"MIT"
] | null | null | null | __all__ = ['readfeed']
import importlib
def load_commands(manager):
for command in __all__:
x = importlib.import_module("%s.%s" % (__name__, command))
manager.add_command(str(command).lower(), getattr(x, str(command).title())())
| 25.2 | 85 | 0.666667 | 31 | 252 | 4.935484 | 0.645161 | 0.130719 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.170635 | 252 | 9 | 86 | 28 | 0.732057 | 0 | 0 | 0 | 0 | 0 | 0.051587 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.333333 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
81fe31a6d45131f1153cfe4944b9b1e84e311ef2 | 56 | py | Python | web/db.py | hkpeprah/television-2.0 | 8d6ce9cc7c8e22447769845159464422153cb8f6 | [
"MIT"
] | null | null | null | web/db.py | hkpeprah/television-2.0 | 8d6ce9cc7c8e22447769845159464422153cb8f6 | [
"MIT"
] | null | null | null | web/db.py | hkpeprah/television-2.0 | 8d6ce9cc7c8e22447769845159464422153cb8f6 | [
"MIT"
] | null | null | null | DATABASE_NAME = 'telepebble'
DATABASE_ALIAS = 'default'
| 18.666667 | 28 | 0.785714 | 6 | 56 | 7 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.107143 | 56 | 2 | 29 | 28 | 0.84 | 0 | 0 | 0 | 0 | 0 | 0.303571 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
c302b584c787f61277a81aa19ac66b39f18b31fe | 639 | py | Python | Applications/SlicerApp/Testing/Python/SlicerSceneObserverTest.py | forfullstack/slicersources-src | 91bcecf037a27f3fad4c0ab57e8286fc258bb0f5 | [
"Apache-2.0"
] | null | null | null | Applications/SlicerApp/Testing/Python/SlicerSceneObserverTest.py | forfullstack/slicersources-src | 91bcecf037a27f3fad4c0ab57e8286fc258bb0f5 | [
"Apache-2.0"
] | null | null | null | Applications/SlicerApp/Testing/Python/SlicerSceneObserverTest.py | forfullstack/slicersources-src | 91bcecf037a27f3fad4c0ab57e8286fc258bb0f5 | [
"Apache-2.0"
] | null | null | null | from __future__ import print_function
import unittest
import slicer
import vtk
class testClass(object):
""" Check that slicer exits correctly after adding an observer to the mrml scene
"""
def callback(self, caller, event):
print('Got %s from %s' % (event, caller))
def setUp(self):
print("Adding observer to the scene")
self.tag = slicer.mrmlScene.AddObserver(vtk.vtkCommand.ModifiedEvent, self.callback)
print("Modify the scene")
slicer.mrmlScene.Modified()
class SlicerSceneObserverTest(unittest.TestCase):
def setUp(self):
pass
def test_testClass(self):
test = testClass()
test.setUp()
| 24.576923 | 88 | 0.72144 | 81 | 639 | 5.617284 | 0.518519 | 0.043956 | 0.057143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.176839 | 639 | 25 | 89 | 25.56 | 0.865019 | 0.118936 | 0 | 0.111111 | 0 | 0 | 0.104882 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0.055556 | 0.222222 | 0 | 0.555556 | 0.222222 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
c307c36602cc31599f6793636c4a11c928c3b86e | 1,601 | py | Python | nextbusstats/routes/migrations/0001_initial.py | Remiz/nextbus-route-analyzer | 04c2d940773c721dccf094455488e796f9d42bbe | [
"Apache-2.0"
] | 2 | 2016-02-06T01:15:41.000Z | 2016-04-08T17:35:48.000Z | nextbusstats/routes/migrations/0001_initial.py | Remiz/nextbus-route-analyzer | 04c2d940773c721dccf094455488e796f9d42bbe | [
"Apache-2.0"
] | null | null | null | nextbusstats/routes/migrations/0001_initial.py | Remiz/nextbus-route-analyzer | 04c2d940773c721dccf094455488e796f9d42bbe | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-15 21:54
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Route',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tag', models.CharField(max_length=10)),
('title', models.CharField(max_length=100)),
('color', models.CharField(max_length=6)),
('opposite_color', models.CharField(max_length=6)),
('lat_min', models.FloatField()),
('lat_max', models.FloatField()),
('lon_min', models.FloatField()),
('lon_max', models.FloatField()),
],
),
migrations.CreateModel(
name='Stop',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tag', models.CharField(max_length=10)),
('stop_id', models.CharField(max_length=10, null=True)),
('title', models.CharField(max_length=200)),
('lat', models.FloatField()),
('lon', models.FloatField()),
('route', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='stops', to='routes.Route')),
],
),
]
| 36.386364 | 131 | 0.554653 | 160 | 1,601 | 5.3875 | 0.4125 | 0.12181 | 0.146172 | 0.194896 | 0.417633 | 0.320186 | 0.25058 | 0.25058 | 0.25058 | 0.25058 | 0 | 0.025732 | 0.296065 | 1,601 | 43 | 132 | 37.232558 | 0.73913 | 0.0406 | 0 | 0.342857 | 1 | 0 | 0.075016 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.085714 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
c30c2f6d0e8783e9afaebcbd41d1401ccbdb7798 | 154 | py | Python | py_tea_code/2.mypro_io/file13.py | qq4215279/study_python | b0eb9dedfc4abb2fd6c024a599e7375869c3d77a | [
"Apache-2.0"
] | null | null | null | py_tea_code/2.mypro_io/file13.py | qq4215279/study_python | b0eb9dedfc4abb2fd6c024a599e7375869c3d77a | [
"Apache-2.0"
] | null | null | null | py_tea_code/2.mypro_io/file13.py | qq4215279/study_python | b0eb9dedfc4abb2fd6c024a599e7375869c3d77a | [
"Apache-2.0"
] | null | null | null | #coding=utf-8
#使用递归计算n的阶乘(5!=5*4*3*2*1)
def factorial(n):
if n==1:
return n
else:
return n*factorial(n-1)
print(factorial(5))
| 11.846154 | 31 | 0.564935 | 27 | 154 | 3.222222 | 0.592593 | 0.229885 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.087719 | 0.25974 | 154 | 12 | 32 | 12.833333 | 0.675439 | 0.233766 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0 | 0 | 0.5 | 0.166667 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
c31d8ee057ec88bacd3aea380bbb0f28185c0956 | 866 | py | Python | davis/models/forms/login.py | deanwang539/davis_v0.1 | 36416b4495fbe8ae15994c4d0e009b8955bef7ba | [
"MIT"
] | null | null | null | davis/models/forms/login.py | deanwang539/davis_v0.1 | 36416b4495fbe8ae15994c4d0e009b8955bef7ba | [
"MIT"
] | null | null | null | davis/models/forms/login.py | deanwang539/davis_v0.1 | 36416b4495fbe8ae15994c4d0e009b8955bef7ba | [
"MIT"
] | null | null | null | from davis.models.database.gsdb import Gsdb
from davis.models.utils.crypto import compare_psw
# Users columns
UID_COL = 3
USERNAME_COL = 4
PASSWORD_COL = 5
class Login(object):
def __init__(self, ws):
self.worksheet = Gsdb(ws)
def verify_user(self, username, psw):
cell = self.worksheet.find_cell(USERNAME_COL, username)
if not cell:
return False
else:
return compare_psw(psw, self.worksheet.get_cell(cell.row, PASSWORD_COL).value)
def get_uid(self, username):
cell = self.worksheet.find_cell(USERNAME_COL, username)
return self.worksheet.get_cell(cell.row, UID_COL).value
def get_info(self, username, targets=None):
cell = self.worksheet.find_cell(USERNAME_COL, username)
return self.worksheet.get_row_values(cell.row, targets)
| 30.928571 | 91 | 0.67321 | 117 | 866 | 4.777778 | 0.367521 | 0.162791 | 0.091234 | 0.112701 | 0.382826 | 0.382826 | 0.314848 | 0.314848 | 0.236136 | 0.236136 | 0 | 0.004532 | 0.235566 | 866 | 27 | 92 | 32.074074 | 0.839879 | 0.015012 | 0 | 0.15 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0.1 | 0.1 | 0 | 0.55 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 2 |
c32549a4af8bb33b984d01f37181e25159978714 | 1,195 | py | Python | src/lower.py | JoelLefkowitz/convert-case | 192f2153f7f6327ad682a52ed3adb888ac2e3bba | [
"MIT"
] | 2 | 2021-09-09T22:55:08.000Z | 2021-11-12T00:41:09.000Z | src/lower.py | JoelLefkowitz/convert-case | 192f2153f7f6327ad682a52ed3adb888ac2e3bba | [
"MIT"
] | null | null | null | src/lower.py | JoelLefkowitz/convert-case | 192f2153f7f6327ad682a52ed3adb888ac2e3bba | [
"MIT"
] | null | null | null | from .camel import camel_to_lower_case, is_camel_case
from .definitions import LOWER
from .exceptions import MixedCaseError
from .kebab import is_kebab_case, kebab_to_lower_case
from .pascal import is_pascal_case, pascal_to_lower_case
from .sentence import is_sentence_case, sentence_to_lower_case
from .snake import is_snake_case, snake_to_lower_case
from .title import is_title_case, title_to_lower_case
from .upper import is_upper_case, upper_to_lower_case
TO_LOWER = [
(is_lower_case, lambda x: x),
(is_upper_case, upper_to_lower_case),
(is_sentence_case, sentence_to_lower_case),
(is_title_case, title_to_lower_case),
(is_camel_case, camel_to_lower_case),
(is_snake_case, snake_to_lower_case),
(is_kebab_case, kebab_to_lower_case),
(is_pascal_case, pascal_to_lower_case),
]
def lower_case(string: str) -> str:
try:
return next(to_case(string) for is_case, to_case in TO_LOWER if is_case(string))
# Throwing an error introduces impurity but is preferable to
# returning an empty result.
except StopIteration:
raise MixedCaseError(string)
def is_lower_case(string: str) -> bool:
return LOWER.match(string) is not None
| 34.142857 | 88 | 0.779079 | 191 | 1,195 | 4.450262 | 0.230366 | 0.18 | 0.181176 | 0.107059 | 0.463529 | 0.438824 | 0.4 | 0 | 0 | 0 | 0 | 0 | 0.153138 | 1,195 | 34 | 89 | 35.147059 | 0.839921 | 0.07113 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076923 | false | 0 | 0.346154 | 0.038462 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
c33942f4064f1927bd668c468c088ea56a87441f | 707 | py | Python | codeEval/medium/a_pile_of_bricks.py | gauravsingh58/algo | 397859a53429e7a585e5f6964ad24146c6261326 | [
"WTFPL"
] | 1 | 2020-09-30T19:53:08.000Z | 2020-09-30T19:53:08.000Z | codeEval/medium/a_pile_of_bricks.py | gauravsingh58/algo | 397859a53429e7a585e5f6964ad24146c6261326 | [
"WTFPL"
] | null | null | null | codeEval/medium/a_pile_of_bricks.py | gauravsingh58/algo | 397859a53429e7a585e5f6964ad24146c6261326 | [
"WTFPL"
] | 1 | 2020-10-15T09:10:57.000Z | 2020-10-15T09:10:57.000Z | import sys
def strip_split(x):
return map(int, x.strip('[]').split(','))
def diff(x):
return abs(x[0] - x[1])
def is_possible(h, b):
return h[0] >= b[0] and h[1] >= b[1]
def dim(*args):
return sorted(map(diff, zip(*map(strip_split, args))))
def can_pass_through(hole, piles):
    """Comma-joined ascending ids of bricks whose smallest cross-section fits *hole*."""
    fitting = sorted(int(i) for i, x, y in piles if is_possible(hole, dim(x, y)))
    return ','.join(str(i) for i in fitting)
# Driver: each input line is "<hole>|<piles>".  The hole is two bracketed
# corners separated by whitespace; piles are ';'-separated "(id [x] [y])"
# entries.  Prints the ids of bricks that fit, or '-' when none do.
with open(sys.argv[1], 'r') as test_cases:
    for test in test_cases:
        hole, piles = test.strip().split('|')
        r = can_pass_through(sorted(map(diff, zip(*map(strip_split, hole.split())))),
                             map(lambda e: e.strip('()').split(), piles.split(';')))
        # Parenthesized print works under both Python 2 and 3; the original
        # bare ``print r`` statement is a SyntaxError on Python 3.
        print(r if r else '-')
| 28.28 | 98 | 0.594059 | 120 | 707 | 3.4 | 0.366667 | 0.147059 | 0.063725 | 0.078431 | 0.142157 | 0.142157 | 0.142157 | 0 | 0 | 0 | 0 | 0.012281 | 0.193777 | 707 | 24 | 99 | 29.458333 | 0.703509 | 0 | 0 | 0 | 0 | 0 | 0.014144 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.111111 | 0.055556 | null | null | 0.055556 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
c342f1f7d587af376dd6edcdcec4f46d6a9cd574 | 2,724 | py | Python | dlkit/json_/assessment_authoring/record_templates.py | UOC/dlkit | a9d265db67e81b9e0f405457464e762e2c03f769 | [
"MIT"
] | 2 | 2018-02-23T12:16:11.000Z | 2020-10-08T17:54:24.000Z | dlkit/json_/assessment_authoring/record_templates.py | UOC/dlkit | a9d265db67e81b9e0f405457464e762e2c03f769 | [
"MIT"
] | 87 | 2017-04-21T18:57:15.000Z | 2021-12-13T19:43:57.000Z | dlkit/json_/assessment_authoring/record_templates.py | UOC/dlkit | a9d265db67e81b9e0f405457464e762e2c03f769 | [
"MIT"
] | 1 | 2018-03-01T16:44:25.000Z | 2018-03-01T16:44:25.000Z | """JSON implementations of assessment.authoring records."""
# pylint: disable=no-init
# Numerous classes don't require __init__.
# pylint: disable=too-many-public-methods,too-few-public-methods
# Number of methods are defined in specification
# pylint: disable=protected-access
# Access to protected methods allowed in package json package scope
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
from .. import utilities
from ..osid import records as osid_records
from dlkit.abstract_osid.assessment_authoring import records as abc_assessment_authoring_records
class AssessmentPartRecord(abc_assessment_authoring_records.AssessmentPartRecord, osid_records.OsidRecord):
    """A record for an ``AssessmentPart``.

    The methods specified by the record type are available through the
    underlying object.

    """
    # Marker class only: adds no behaviour beyond its bases.
class AssessmentPartQueryRecord(abc_assessment_authoring_records.AssessmentPartQueryRecord, osid_records.OsidRecord):
    """A record for an ``AssessmentPartQuery``.

    The methods specified by the record type are available through the
    underlying object.

    """
    # Marker class only: adds no behaviour beyond its bases.
class AssessmentPartFormRecord(abc_assessment_authoring_records.AssessmentPartFormRecord, osid_records.OsidRecord):
    """A record for an ``AssessmentPartForm``.

    The methods specified by the record type are available through the
    underlying object.

    """
    # Marker class only: adds no behaviour beyond its bases.
class AssessmentPartSearchRecord(abc_assessment_authoring_records.AssessmentPartSearchRecord, osid_records.OsidRecord):
    """A record for an ``AssessmentPartSearch``.

    The methods specified by the record type are available through the
    underlying object.

    """
    # Marker class only: adds no behaviour beyond its bases.
class SequenceRuleRecord(abc_assessment_authoring_records.SequenceRuleRecord, osid_records.OsidRecord):
    """A record for a ``SequenceRule``.

    The methods specified by the record type are available through the
    underlying object.

    """
    # Marker class only: adds no behaviour beyond its bases.
class SequenceRuleQueryRecord(abc_assessment_authoring_records.SequenceRuleQueryRecord, osid_records.OsidRecord):
    """A record for a ``SequenceRuleQuery``.

    The methods specified by the record type are available through the
    underlying object.

    """
    # Marker class only: adds no behaviour beyond its bases.
class SequenceRuleFormRecord(abc_assessment_authoring_records.SequenceRuleFormRecord, osid_records.OsidRecord):
    """A record for a ``SequenceRuleForm``.

    The methods specified by the record type are available through the
    underlying object.

    """
    # Marker class only: adds no behaviour beyond its bases.
class SequenceRuleSearchRecord(abc_assessment_authoring_records.SequenceRuleSearchRecord, osid_records.OsidRecord):
    """A record for a ``SequenceRuleSearch``.

    The methods specified by the record type are available through the
    underlying object.

    """
    # Marker class only: adds no behaviour beyond its bases.
| 30.954545 | 119 | 0.7779 | 303 | 2,724 | 6.854785 | 0.234323 | 0.100626 | 0.125181 | 0.125662 | 0.419355 | 0.419355 | 0.419355 | 0.294174 | 0.294174 | 0.294174 | 0 | 0 | 0.153451 | 2,724 | 87 | 120 | 31.310345 | 0.900694 | 0.520558 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.272727 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
c34ed41cec82806bb514158b4870cf170ae75e98 | 1,043 | py | Python | setup.py | michaelnugent/logbisect | 9423422c0b4d8b2dfa007999fecfee33a01123f6 | [
"BSD-3-Clause"
] | null | null | null | setup.py | michaelnugent/logbisect | 9423422c0b4d8b2dfa007999fecfee33a01123f6 | [
"BSD-3-Clause"
] | null | null | null | setup.py | michaelnugent/logbisect | 9423422c0b4d8b2dfa007999fecfee33a01123f6 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
from setuptools import setup
from sys import version_info
assert version_info >= (3, 7, 0), "logbisect requires >= Python 3.7"
INSTALL_REQUIRES = ["click", "python-dateutil"]
setup(
name="logbisect",
version="1.0.0",
description=("search huge log files quickly"),
long_description="quickly search huge files containing lines with ordered timestamps using bisection",
packages=["logbisect"],
url="http://github.com/michaelnugent",
author="Mike Nugent",
author_email="michael@michaelnugent.org",
classifiers=[
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Development Status :: 3 - Alpha",
],
python_requires=">=3.7",
install_requires=INSTALL_REQUIRES,
entry_points={"console_scripts": ["logbisect = logbisect.cli:begin"]},
) | 34.766667 | 107 | 0.641419 | 115 | 1,043 | 5.730435 | 0.556522 | 0.063733 | 0.189681 | 0.197269 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.02454 | 0.2186 | 1,043 | 30 | 108 | 34.766667 | 0.784049 | 0.020134 | 0 | 0 | 0 | 0 | 0.527694 | 0.025176 | 0 | 0 | 0 | 0 | 0.04 | 1 | 0 | false | 0 | 0.08 | 0 | 0.08 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
c35343b8f66589c125300c8f1e37c12a7ee44d22 | 16,066 | py | Python | libc/tools/gensyscalls.py | cfriedt/bionic | d65ab1d9017fe191208974be49aa244688177e26 | [
"BSD-2-Clause"
] | 18 | 2015-06-28T23:08:40.000Z | 2021-11-19T17:55:25.000Z | libc/tools/gensyscalls.py | cfriedt/bionic | d65ab1d9017fe191208974be49aa244688177e26 | [
"BSD-2-Clause"
] | 1 | 2015-12-05T14:20:14.000Z | 2015-12-05T14:20:14.000Z | libc/tools/gensyscalls.py | cfriedt/bionic | d65ab1d9017fe191208974be49aa244688177e26 | [
"BSD-2-Clause"
] | 4 | 2017-12-19T12:22:07.000Z | 2022-01-25T16:07:47.000Z | #!/usr/bin/python
#
# this tool is used to generate the syscall assmbler templates
# to be placed into arch-x86/syscalls, as well as the content
# of arch-x86/linux/_syscalls.h
#
import sys, os.path, glob, re, commands, filecmp, shutil
from bionic_utils import *
# Refuse to run under Python 3: this script still uses Python 2 print
# statements and the removed ``commands`` module.
if sys.version_info.major != 2:
    print "error: please use Python 2 with this script. Your version is"
    print "%s" % (sys.version)
    sys.exit(1)

# set this to 1 if you want to generate thumb stubs
gen_thumb_stubs = 0

# set this to 1 if you want to generate ARM EABI stubs
gen_eabi_stubs = 1

# get the root Bionic directory, simply this script's dirname
#
bionic_root = find_bionic_root()
if not bionic_root:
    print "could not find the Bionic root directory. aborting"
    sys.exit(1)

# Normalize to a trailing slash so paths can be built by concatenation.
if bionic_root[-1] != '/':
    bionic_root += "/"

#print "bionic_root is %s" % bionic_root
#print "syscalls.stamp is '%s'" % syscalls_stamp

# temp directory where we store all intermediate files
# (overwritten with the build directory at the bottom of this script)
bionic_temp = ""

# all architectures, update as you see fit
all_archs = [ "arm", "x86" ]
def make_dir( path ):
    """Create *path* and any missing parent directories (like ``mkdir -p``).

    A no-op when the directory already exists.
    """
    # os.makedirs creates all missing intermediate directories in one call,
    # replacing the original hand-rolled recursion over os.path.dirname.
    if not os.path.exists(path):
        os.makedirs(path)
def create_file( relpath ):
    """Open *relpath* (relative to bionic_temp) for writing, creating parent dirs."""
    target = bionic_temp + "/" + relpath
    make_dir(os.path.dirname(target))
    return open(target, "w")
# x86 assembler templates for each syscall stub
#
# Every template is expanded with a dict providing ``fname`` (the C-visible
# function name) and ``idname`` (the __NR_* syscall constant).
x86_header = """/* autogenerated by gensyscalls.py */
#include <linux/err.h>
#include <machine/asm.h>
#include <sys/linux-syscalls.h>
ENTRY(%(fname)s)
"""

# Registers used, in order, for syscall arguments on x86.
x86_registers = [ "%ebx", "%ecx", "%edx", "%esi", "%edi", "%ebp" ]

# Trap into the kernel, then convert a -errno return into errno + -1.
x86_call = """ movl $%(idname)s, %%eax
int $0x80
cmpl $-MAX_ERRNO, %%eax
jb 1f
negl %%eax
pushl %%eax
call __set_errno
addl $4, %%esp
orl $-1, %%eax
1:
"""

x86_return = """ ret
END(%(fname)s)
"""

# ARM assembler templates for each syscall stub
#
arm_header = """/* autogenerated by gensyscalls.py */
#include <machine/asm.h>
#include <sys/linux-syscalls.h>
ENTRY(%(fname)s)
"""

arm_footer = """\
END(%(fname)s)
"""

# Pre-EABI call: the syscall number is encoded in the swi immediate.
arm_call_default = arm_header + """\
swi #%(idname)s
movs r0, r0
bxpl lr
b __set_syscall_errno
""" + arm_footer

# Pre-EABI call with more than 4 arguments: args 5/6 travel in r4/r5.
arm_call_long = arm_header + """\
.save {r4, r5, lr}
stmfd sp!, {r4, r5, lr}
ldr r4, [sp, #12]
ldr r5, [sp, #16]
swi # %(idname)s
ldmfd sp!, {r4, r5, lr}
movs r0, r0
bxpl lr
b __set_syscall_errno
""" + arm_footer

# EABI call: the syscall number goes in r7 and the swi immediate is 0.
arm_eabi_call_default = arm_header + """\
.save {r4, r7}
stmfd sp!, {r4, r7}
ldr r7, =%(idname)s
swi #0
ldmfd sp!, {r4, r7}
movs r0, r0
bxpl lr
b __set_syscall_errno
""" + arm_footer

# EABI call with more than 4 arguments: args 5-7 are loaded into r4-r6.
arm_eabi_call_long = arm_header + """\
mov ip, sp
.save {r4, r5, r6, r7}
stmfd sp!, {r4, r5, r6, r7}
ldmfd ip, {r4, r5, r6}
ldr r7, =%(idname)s
swi #0
ldmfd sp!, {r4, r5, r6, r7}
movs r0, r0
bxpl lr
b __set_syscall_errno
""" + arm_footer

# ARM thumb assembler templates for each syscall stub
#
thumb_header = """/* autogenerated by gensyscalls.py */
.text
.type %(fname)s, #function
.globl %(fname)s
.align 4
.thumb_func
.fnstart
.syntax unified
#ifndef __thumb__
#define __thumb__
#endif
#include <sys/linux-syscalls.h>
%(fname)s:
"""

thumb_call_default = thumb_header + """\
.save {r7,lr}
push {r7,lr}
ldr r7, =%(idname)s
swi #0
tst r0, r0
bmi 1f
pop {r7,pc}
1:
rsbs r0, r0, #0
ldr r1, =__set_errno
blx r1
pop {r7,pc}
.fnend
"""

# Thumb call with more than 4 arguments: args 5/6 travel in r4/r5.
thumb_call_long = thumb_header + """\
.save {r4,r5,r7,lr}
push {r4,r5,r7,lr}
ldr r4, [sp,#16]
ldr r5, [sp,#20]
ldr r7, =%(idname)s
swi #0
tst r0, r0
bmi 1f
pop {r4,r5,r7,pc}
1:
rsbs r0, r0, #0
ldr r1, =__set_errno
blx r1
pop {r4,r5,r7,pc}
.fnend
"""
def param_uses_64bits(param):
    """Returns True iff a syscall parameter description corresponds
    to a 64-bit type."""
    param = param.strip()
    # First, check that the param type begins with one of the known
    # 64-bit types.  str.startswith accepts a tuple of prefixes, which
    # replaces the original chain of or-ed startswith calls.
    if not param.startswith(("int64_t", "uint64_t", "loff_t", "off64_t",
                             "long long", "unsigned long long",
                             "signed long long")):
        return False
    # Second, check that there is no pointer type here
    if "*" in param:
        return False
    # Ok
    return True
def count_arm_param_registers(params):
    """This function is used to count the number of register used
    to pass parameters when invoking a thumb or ARM system call.
    This is because the ARM EABI mandates that 64-bit quantities
    must be passed in an even+odd register pair. So, for example,
    something like:

             foo(int fd, off64_t pos)

    would actually need 4 registers:
             r0 -> int
             r1 -> unused
             r2-r3 -> pos
    """
    used = 0
    for param in params:
        if not param_uses_64bits(param):
            used += 1
            continue
        # 64-bit values must start on an even register: skip the odd
        # register when needed, then consume a pair.
        used += (used & 1) + 2
    return used
def count_generic_param_registers(params):
    """Number of 32-bit register slots needed: 2 per 64-bit param, else 1."""
    return sum(2 if param_uses_64bits(param) else 1 for param in params)
class State:
    """Holds the parsed SYSCALLS.TXT entries and generates per-arch stub
    files plus the linux-syscalls.h header."""

    def __init__(self):
        self.old_stubs = []    # stub files found on disk before regeneration
        self.new_stubs = []    # stub files written during this run
        self.other_files = []  # non-stub outputs (e.g. linux-syscalls.h)
        self.syscalls = []     # parsed SYSCALLS.TXT entries (dicts)

    def x86_genstub(self, fname, numparams, idname):
        # Save the argument registers, copy the arguments from the stack,
        # trap into the kernel, then restore the registers.
        t = { "fname" : fname,
              "idname" : idname }

        result = x86_header % t
        stack_bias = 4
        for r in range(numparams):
            result += " pushl " + x86_registers[r] + "\n"
            stack_bias += 4
        for r in range(numparams):
            result += " mov %d(%%esp), %s" % (stack_bias+r*4, x86_registers[r]) + "\n"
        result += x86_call % t
        for r in range(numparams):
            result += " popl " + x86_registers[numparams-r-1] + "\n"
        result += x86_return % t
        return result

    def x86_genstub_cid(self, fname, numparams, idname, cid):
        # We'll ignore numparams here because in reality, if there is a
        # dispatch call (like a socketcall syscall) there are actually
        # only 2 arguments to the syscall and 2 regs we have to save:
        #   %ebx <--- Argument 1 - The call id of the needed vectored
        #             syscall (socket, bind, recv, etc)
        #   %ecx <--- Argument 2 - Pointer to the rest of the arguments
        #             from the original function called (socket())
        t = { "fname" : fname,
              "idname" : idname }

        result = x86_header % t
        stack_bias = 4

        # save the regs we need
        result += " pushl %ebx" + "\n"
        stack_bias += 4
        result += " pushl %ecx" + "\n"
        stack_bias += 4

        # set the call id (%ebx)
        result += " mov $%d, %%ebx" % (cid) + "\n"
        # set the pointer to the rest of the args into %ecx
        result += " mov %esp, %ecx" + "\n"
        result += " addl $%d, %%ecx" % (stack_bias) + "\n"

        # now do the syscall code itself
        result += x86_call % t

        # now restore the saved regs
        result += " popl %ecx" + "\n"
        result += " popl %ebx" + "\n"

        # epilog
        result += x86_return % t
        return result

    def arm_genstub(self,fname, flags, idname):
        # Pre-EABI ARM stub; the long variant is used for > 4 arguments.
        t = { "fname" : fname,
              "idname" : idname }
        if flags:
            numargs = int(flags)
            if numargs > 4:
                return arm_call_long % t
        return arm_call_default % t

    def arm_eabi_genstub(self,fname, flags, idname):
        # EABI ARM stub; the long variant is used for > 4 arguments.
        t = { "fname" : fname,
              "idname" : idname }
        if flags:
            numargs = int(flags)
            if numargs > 4:
                return arm_eabi_call_long % t
        return arm_eabi_call_default % t

    def thumb_genstub(self,fname, flags, idname):
        # Thumb stub; the long variant is used for > 4 arguments.
        t = { "fname" : fname,
              "idname" : idname }
        if flags:
            numargs = int(flags)
            if numargs > 4:
                return thumb_call_long % t
        return thumb_call_default % t

    def superh_genstub(self, fname, flags, idname):
        # NOTE(review): the superh_* templates referenced here are not
        # defined anywhere in this file — this method looks like dead code
        # and would raise NameError if called; confirm before use.
        numargs = int(flags)
        t = { "fname" : fname,
              "idname" : idname,
              "numargs" : numargs }
        superh_call = superh_header
        if flags:
            if numargs == 5:
                superh_call += superh_5args_header
            if numargs == 6:
                superh_call += superh_6args_header
            if numargs == 7:
                superh_call += superh_7args_header
        superh_call += superh_call_default
        return superh_call % t

    def process_file(self,input):
        """Parse SYSCALLS.TXT and attach the generated assembly text for
        each architecture to every syscall entry."""
        parser = SysCallsTxtParser()
        parser.parse_file(input)
        self.syscalls = parser.syscalls
        parser = None

        for t in self.syscalls:
            syscall_func = t["func"]
            syscall_params = t["params"]
            syscall_name = t["name"]

            # "id" is the ARM syscall number (when >= 0).
            if t["id"] >= 0:
                num_regs = count_arm_param_registers(syscall_params)
                if gen_thumb_stubs:
                    t["asm-thumb"] = self.thumb_genstub(syscall_func,num_regs,"__NR_"+syscall_name)
                else:
                    if gen_eabi_stubs:
                        t["asm-arm"] = self.arm_eabi_genstub(syscall_func,num_regs,"__NR_"+syscall_name)
                    else:
                        t["asm-arm"] = self.arm_genstub(syscall_func,num_regs,"__NR_"+syscall_name)

            # "id2" is the x86 syscall number; "cid" selects a dispatched
            # (socketcall-style) stub.
            if t["id2"] >= 0:
                num_regs = count_generic_param_registers(syscall_params)
                if t["cid"] >= 0:
                    t["asm-x86"] = self.x86_genstub_cid(syscall_func, num_regs, "__NR_"+syscall_name, t["cid"])
                else:
                    t["asm-x86"] = self.x86_genstub(syscall_func, num_regs, "__NR_"+syscall_name)
            elif t["cid"] >= 0:
                E("cid for dispatch syscalls is only supported for x86 in "
                  "'%s'" % syscall_name)
                return

    def gen_NR_syscall(self,fp,name,id):
        # Emit one "#define __NR_<name> (__NR_SYSCALL_BASE + <id>)" line.
        fp.write( "#define __NR_%-25s (__NR_SYSCALL_BASE + %d)\n" % (name,id) )

    # now dump the content of linux/_syscalls.h
    def gen_linux_syscalls_h(self,odir):
        path = "libc/include/sys/linux-syscalls.h"
        D( "generating "+path )
        fp = create_file( path )
        fp.write( "/* auto-generated by gensyscalls.py, do not touch */\n" )
        fp.write( "#ifndef _BIONIC_LINUX_SYSCALLS_H_\n\n" )
        fp.write( "#if !defined __ASM_ARM_UNISTD_H && !defined __ASM_I386_UNISTD_H\n" )
        fp.write( "#if defined __arm__ && !defined __ARM_EABI__ && !defined __thumb__\n" )
        fp.write( " # define __NR_SYSCALL_BASE 0x900000\n" )
        fp.write( " #else\n" )
        fp.write( " # define __NR_SYSCALL_BASE 0\n" )
        fp.write( " #endif\n\n" )

        # first, all common syscalls
        for sc in self.syscalls:
            sc_id = sc["id"]
            sc_id2 = sc["id2"]
            sc_name = sc["name"]
            if sc_id == sc_id2 and sc_id >= 0:
                self.gen_NR_syscall( fp, sc_name, sc_id )

        # now, all arm-specific syscalls
        fp.write( "\n#ifdef __arm__\n" );
        for sc in self.syscalls:
            sc_id = sc["id"]
            sc_id2 = sc["id2"]
            sc_name = sc["name"]
            if sc_id != sc_id2 and sc_id >= 0:
                self.gen_NR_syscall( fp, sc_name, sc_id )
        fp.write( "#endif\n" );

        gen_syscalls = {}  # guards against emitting duplicate x86 defines
        # finally, all i386-specific syscalls
        fp.write( "\n#ifdef __i386__\n" );
        for sc in self.syscalls:
            sc_id = sc["id"]
            sc_id2 = sc["id2"]
            sc_name = sc["name"]
            if sc_id != sc_id2 and sc_id2 >= 0 and sc_name not in gen_syscalls:
                self.gen_NR_syscall( fp, sc_name, sc_id2 )
                gen_syscalls[sc_name] = True
        fp.write( "#endif\n" );

        fp.write( "\n#endif\n" )
        fp.write( "\n#endif /* _BIONIC_LINUX_SYSCALLS_H_ */\n" );
        fp.close()
        self.other_files.append( path )

    # now dump the contents of syscalls.mk
    def list_arch_syscalls(self, arch, syscalls_subdir):
        """Print the stub file names for *arch*, one per line."""
        # fp2 = open( syscalls_stamp, "w")
        # dict.has_key is Python-2-only, consistent with the rest of the file
        arch_test = {
            "thumb": lambda x: x.has_key("asm-thumb"),
            "arm": lambda x: x.has_key("asm-arm"),
            "x86": lambda x: x.has_key("asm-x86"),
        }
        for sc in self.syscalls:
            if arch_test[arch](sc):
                print "%s/%s.S" % (syscalls_subdir,sc["func"])
        #self.other_files.append( path )

    # now generate each syscall stub
    def gen_syscall_stubs(self, arch, top_builddir, syscalls_subdir):
        """Write one .S file per syscall that has assembly for *arch*."""
        for sc in self.syscalls:
            if sc.has_key("asm-arm") and 'arm' == arch:
                fname = "%s/%s.S" % (syscalls_subdir,sc["func"])
                D2( ">>> generating "+fname )
                fp = create_file( fname )
                fp.write(sc["asm-arm"])
                fp.close()
                self.new_stubs.append( fname )

            if sc.has_key("asm-thumb") and 'thumb' == arch:
                fname = "%s/%s.S" % (syscalls_subdir,sc["func"])
                D2( ">>> generating "+fname )
                fp = create_file( fname )
                fp.write(sc["asm-thumb"])
                fp.close()
                self.new_stubs.append( fname )

            if sc.has_key("asm-x86") and 'x86' == arch:
                fname = "%s/%s.S" % (syscalls_subdir,sc["func"])
                D2( ">>> generating "+fname )
                fp = create_file( fname )
                fp.write(sc["asm-x86"])
                fp.close()
                self.new_stubs.append( fname )

    def regenerate(self, arch, top_builddir, syscalls_subdir):
        """Scan existing stubs, then regenerate all stubs and support files
        into bionic_temp and copy them into place."""
        D( "scanning for existing architecture-specific stub files" )

        bionic_root_len = len(bionic_root)
        syscalls_path = top_builddir + "/" + syscalls_subdir
        D( "scanning " + syscalls_path )
        files = glob.glob( syscalls_path + "/*.S" )
        for f in files:
            self.old_stubs.append( f[bionic_root_len:].replace(arch + "-","") )
        D( "found %d stub files" % len(self.old_stubs) )

        if not os.path.exists( bionic_temp ):
            D( "creating %s" % bionic_temp )
            os.mkdir( bionic_temp )

        # D( "p4 editing source files" )
        # for arch in all_archs:
        #     commands.getoutput( "p4 edit " + arch + "/syscalls/*.S " )
        #     commands.getoutput( "p4 edit " + arch + "/syscalls.mk" )
        # commands.getoutput( "p4 edit " + bionic_root + "include/sys/linux-syscalls.h" )

        D( "re-generating stubs and support files" )

        self.gen_linux_syscalls_h(top_builddir)
        self.list_arch_syscalls(arch, syscalls_subdir)
        self.gen_syscall_stubs(arch, top_builddir, syscalls_subdir)

        # Copy every generated file from bionic_temp into the tree.
        for stub in self.new_stubs + self.other_files:
            D( "stub is %s" % stub )
            if not os.path.exists( stub ):
                D( "new file: " + stub)
                make_dir(os.path.dirname(stub))
            shutil.copyfile( bionic_temp + "/" + stub, stub )
D_setlevel(0)

# Command line: gensyscalls.py <arch> <top_builddir> <syscalls_subdir>
arch = sys.argv[1]
if arch == "thumb":
    gen_thumb_stubs = 1
top_builddir = sys.argv[2]
syscalls_subdir = sys.argv[3]

state = State()
state.process_file(bionic_root+"SYSCALLS.TXT")
# All intermediate files go into the build directory.
bionic_temp = top_builddir
state.regenerate(arch, top_builddir, syscalls_subdir)
| 29.751852 | 111 | 0.545749 | 2,089 | 16,066 | 4.008617 | 0.17999 | 0.015047 | 0.01839 | 0.012181 | 0.404466 | 0.347982 | 0.247313 | 0.216981 | 0.204801 | 0.174469 | 0 | 0.02521 | 0.333375 | 16,066 | 539 | 112 | 29.80705 | 0.756676 | 0.121001 | 0 | 0.409449 | 1 | 0 | 0.265262 | 0.014412 | 0.013123 | 0 | 0.000887 | 0 | 0 | 0 | null | null | 0 | 0.005249 | null | null | 0.010499 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
c35873e9dc784012bb6835f55cb35fc8e5fc4307 | 145 | py | Python | config/parameters.py | NicolasEstrada/fitech | 4cc9c83c8a8bcf971a85e584a111e9b775e0a92f | [
"MIT"
] | null | null | null | config/parameters.py | NicolasEstrada/fitech | 4cc9c83c8a8bcf971a85e584a111e9b775e0a92f | [
"MIT"
] | 1 | 2017-12-27T06:53:54.000Z | 2017-12-27T06:53:54.000Z | config/parameters.py | NicolasEstrada/fitech | 4cc9c83c8a8bcf971a85e584a111e9b775e0a92f | [
"MIT"
] | null | null | null |
# Base URL of the St. Louis Fed (FRED) API.
BASE_URL = "https://api.stlouisfed.org"
# Endpoint returning the observations of a data series.
SERIES_ENDPOINT = "fred/series/observations"
# Series ids to fetch.
# NOTE(review): presumably real GDP, consumer sentiment and unemployment
# rate, going by the standard FRED ids — confirm against the FRED catalog.
SERIES = ["GDPC1", "UMCSENT", "UNRATE"]
# Response format requested from the API.
FILE_TYPE = "json"
| 24.166667 | 44 | 0.710345 | 18 | 145 | 5.555556 | 0.888889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007692 | 0.103448 | 145 | 5 | 45 | 29 | 0.761538 | 0 | 0 | 0 | 0 | 0 | 0.5 | 0.166667 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
c358bd06e34c88e7988ad13f0b2a3a1cbd23a53d | 195 | py | Python | res/TensorFlowPythonExamples/examples/local_response_normalization/__init__.py | glistening/ONE-1 | cadf3a4da4f4340081862abbd3900af7c4b0e22d | [
"Apache-2.0"
] | null | null | null | res/TensorFlowPythonExamples/examples/local_response_normalization/__init__.py | glistening/ONE-1 | cadf3a4da4f4340081862abbd3900af7c4b0e22d | [
"Apache-2.0"
] | null | null | null | res/TensorFlowPythonExamples/examples/local_response_normalization/__init__.py | glistening/ONE-1 | cadf3a4da4f4340081862abbd3900af7c4b0e22d | [
"Apache-2.0"
] | null | null | null | import tensorflow as tf
# Build the graph in TF1 (graph/session) mode rather than eager mode.
tf.compat.v1.disable_eager_execution()
# Placeholder input named "Hole": one 4x4 feature map with 20 channels.
x_ = tf.compat.v1.placeholder(dtype=tf.float32, shape=(1, 4, 4, 20), name="Hole")
# Local response normalization with depth_radius=5, bias=1.0, alpha=1.0, beta=0.5.
op_ = tf.compat.v1.nn.lrn(x_, 5, 1.0, 1.0, 0.5)
| 27.857143 | 81 | 0.692308 | 39 | 195 | 3.333333 | 0.615385 | 0.184615 | 0.230769 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.098266 | 0.112821 | 195 | 6 | 82 | 32.5 | 0.653179 | 0 | 0 | 0 | 0 | 0 | 0.020513 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
c36c87a3633d7c68dd30754d6d5c2b471e37831a | 7,299 | py | Python | tests/quantizer_impl_test.py | julesmuhizi/qkeras | eec5a4a9f1930d0ee51319ab7363dd038a6e68c5 | [
"Apache-2.0"
] | 1 | 2020-12-29T00:52:14.000Z | 2020-12-29T00:52:14.000Z | tests/quantizer_impl_test.py | julesmuhizi/qkeras | eec5a4a9f1930d0ee51319ab7363dd038a6e68c5 | [
"Apache-2.0"
] | null | null | null | tests/quantizer_impl_test.py | julesmuhizi/qkeras | eec5a4a9f1930d0ee51319ab7363dd038a6e68c5 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for methods in quantizer_impl.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import pytest
from tensorflow.keras.layers import *
from tensorflow.keras.models import *
from qkeras import *
from qkeras.qtools.quantized_operators import quantizer_impl
from qkeras import quantizers
from numpy.testing import assert_equal
# pylint: disable=invalid-name
def test_QuantizedBits():
    """Round-trip quantized_bits through QuantizedBits and compare attributes."""
    reference = quantizers.quantized_bits()
    converter = quantizer_impl.QuantizedBits()
    converter.convert_qkeras_quantizer(reference)
    round_trip = converter.convert_to_qkeras_quantizer(
        symmetric=reference.symmetric, alpha=reference.alpha,
        use_stochastic_rounding=reference.use_stochastic_rounding,
        scale_axis=reference.scale_axis,
        qnoise_factor=reference.qnoise_factor)

    for attr, value in round_trip.__dict__.items():
        assert_equal(value, reference.__dict__[attr])
def test_QuantizedTanh():
    """Round-trip quantized_tanh through QuantizedTanh and compare attributes."""
    reference = quantizers.quantized_tanh()
    converter = quantizer_impl.QuantizedTanh()
    converter.convert_qkeras_quantizer(reference)
    round_trip = converter.convert_to_qkeras_quantizer(
        symmetric=reference.symmetric,
        use_stochastic_rounding=reference.use_stochastic_rounding)

    for attr, value in round_trip.__dict__.items():
        assert_equal(value, reference.__dict__[attr])
def test_QuantizedUlaw():
    """Round-trip quantized_ulaw through QuantizedUlaw and compare attributes."""
    reference = quantizers.quantized_ulaw()
    converter = quantizer_impl.QuantizedUlaw()
    converter.convert_qkeras_quantizer(reference)
    round_trip = converter.convert_to_qkeras_quantizer(
        symmetric=reference.symmetric,
        u=reference.u)

    for attr, value in round_trip.__dict__.items():
        assert_equal(value, reference.__dict__[attr])
def test_Binary():
    """Round-trip binary through Binary and compare attributes."""
    reference = quantizers.binary()
    converter = quantizer_impl.Binary()
    converter.convert_qkeras_quantizer(reference)
    round_trip = converter.convert_to_qkeras_quantizer(
        alpha=reference.alpha,
        use_stochastic_rounding=reference.use_stochastic_rounding)

    for attr, value in round_trip.__dict__.items():
        assert_equal(value, reference.__dict__[attr])
def test_StochasticBinary():
    """Round-trip stochastic_binary through StochasticBinary and compare attributes."""
    reference = quantizers.stochastic_binary()
    converter = quantizer_impl.StochasticBinary()
    converter.convert_qkeras_quantizer(reference)
    round_trip = converter.convert_to_qkeras_quantizer(
        alpha=reference.alpha,
        temperature=reference.temperature,
        use_real_sigmoid=reference.use_real_sigmoid)

    for attr, value in round_trip.__dict__.items():
        assert_equal(value, reference.__dict__[attr])
def test_Bernoulli():
    """Round-trip bernoulli through Bernoulli and compare attributes."""
    reference = quantizers.bernoulli()
    converter = quantizer_impl.Bernoulli()
    converter.convert_qkeras_quantizer(reference)
    round_trip = converter.convert_to_qkeras_quantizer(
        alpha=reference.alpha, temperature=reference.temperature,
        use_real_sigmoid=reference.use_real_sigmoid)

    for attr, value in round_trip.__dict__.items():
        assert_equal(value, reference.__dict__[attr])
def test_QuantizedRelu():
    """Round-trip quantized_relu through QuantizedRelu and compare attributes."""
    reference = quantizers.quantized_relu()
    converter = quantizer_impl.QuantizedRelu()
    converter.convert_qkeras_quantizer(reference)
    round_trip = converter.convert_to_qkeras_quantizer(
        use_sigmoid=reference.use_sigmoid,
        negative_slope=reference.negative_slope,
        use_stochastic_rounding=reference.use_stochastic_rounding,
        relu_upper_bound=reference.relu_upper_bound,
        is_quantized_clip=reference.is_quantized_clip,
        qnoise_factor=reference.qnoise_factor)

    for attr, value in round_trip.__dict__.items():
        assert_equal(value, reference.__dict__[attr])
def test_Ternary():
    """Round-trip ternary through Ternary and compare attributes."""
    reference = quantizers.ternary()
    converter = quantizer_impl.Ternary()
    converter.convert_qkeras_quantizer(reference)
    round_trip = converter.convert_to_qkeras_quantizer(
        alpha=reference.alpha, threshold=reference.threshold,
        use_stochastic_rounding=reference.use_stochastic_rounding,
        number_of_unrolls=reference.number_of_unrolls)

    for attr, value in round_trip.__dict__.items():
        assert_equal(value, reference.__dict__[attr])
def test_StochasticTernary():
    """Round-trip stochastic_ternary through StochasticTernary and compare attributes."""
    reference = quantizers.stochastic_ternary()
    converter = quantizer_impl.StochasticTernary()
    converter.convert_qkeras_quantizer(reference)
    round_trip = converter.convert_to_qkeras_quantizer(
        alpha=reference.alpha, threshold=reference.threshold,
        temperature=reference.temperature,
        use_real_sigmoid=reference.use_real_sigmoid,
        number_of_unrolls=reference.number_of_unrolls)

    for attr, value in round_trip.__dict__.items():
        assert_equal(value, reference.__dict__[attr])
def test_PowerOfTwo():
    """Round-trip quantized_po2 through PowerOfTwo and compare attributes."""
    reference = quantizers.quantized_po2()
    converter = quantizer_impl.PowerOfTwo(is_signed=True)
    converter.convert_qkeras_quantizer(reference)
    round_trip = converter.convert_to_qkeras_quantizer(
        negative_slope=None,
        use_stochastic_rounding=reference.use_stochastic_rounding,
        quadratic_approximation=reference.quadratic_approximation)

    for attr, value in round_trip.__dict__.items():
        assert_equal(value, reference.__dict__[attr])
def test_ReluPowerOfTwo():
    """Round-trip quantized_relu_po2 through ReluPowerOfTwo and compare attributes."""
    reference = quantizers.quantized_relu_po2()
    converter = quantizer_impl.ReluPowerOfTwo()
    converter.convert_qkeras_quantizer(reference)
    round_trip = converter.convert_to_qkeras_quantizer(
        negative_slope=reference.negative_slope,
        use_stochastic_rounding=reference.use_stochastic_rounding,
        quadratic_approximation=reference.quadratic_approximation)

    for attr, value in round_trip.__dict__.items():
        assert_equal(value, reference.__dict__[attr])
# Allow running this test module directly (outside a pytest invocation).
if __name__ == "__main__":
    pytest.main([__file__])
| 38.21466 | 80 | 0.798192 | 877 | 7,299 | 6.171038 | 0.166477 | 0.254989 | 0.089431 | 0.056911 | 0.682557 | 0.63156 | 0.63156 | 0.63156 | 0.621027 | 0.621027 | 0 | 0.001553 | 0.11755 | 7,299 | 190 | 81 | 38.415789 | 0.83869 | 0.095356 | 0 | 0.592593 | 0 | 0 | 0.001215 | 0 | 0 | 0 | 0 | 0 | 0.088889 | 1 | 0.081481 | false | 0 | 0.081481 | 0 | 0.162963 | 0.007407 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
c36cb20ac15c2a448d50e42b8cff14decd118e98 | 1,450 | py | Python | tests/esp32/usertask_immediate_terminate.py | rodgergr/pycom-micropython-sigfox | 50a31befc40a39b1e4c3513f20da968792227b0e | [
"MIT"
] | null | null | null | tests/esp32/usertask_immediate_terminate.py | rodgergr/pycom-micropython-sigfox | 50a31befc40a39b1e4c3513f20da968792227b0e | [
"MIT"
] | null | null | null | tests/esp32/usertask_immediate_terminate.py | rodgergr/pycom-micropython-sigfox | 50a31befc40a39b1e4c3513f20da968792227b0e | [
"MIT"
] | 1 | 2019-09-22T01:28:52.000Z | 2019-09-22T01:28:52.000Z | import _thread
import time
from machine import Timer
import gc
def thread_function():
    """Worker-thread body: print a message and terminate immediately.

    Immediate termination is the point of this test — see the comment on
    the busy-wait loop below.
    """
    print("Thread is executing...")
    return 0
def alarm_cb(self):
    """Timer.Alarm callback: spawn one short-lived thread per alarm firing."""
    _thread.start_new_thread(thread_function, ())
#Need to call gc.collect() because when this test is executing usually not enough free memory exists as earlier tests have used it
# NOTE(review): indentation was lost in this copy — confirm gc.collect() is a
# one-time module-level call (as its comment suggests) and not part of alarm_cb.
gc.collect()

# Schedule 12 one-shot alarms, 100 ms apart; each spawns one thread.
Timer.Alarm(handler=alarm_cb, ms=1000, periodic=False)
Timer.Alarm(handler=alarm_cb, ms=1100, periodic=False)
Timer.Alarm(handler=alarm_cb, ms=1200, periodic=False)
Timer.Alarm(handler=alarm_cb, ms=1300, periodic=False)
Timer.Alarm(handler=alarm_cb, ms=1400, periodic=False)
Timer.Alarm(handler=alarm_cb, ms=1500, periodic=False)
Timer.Alarm(handler=alarm_cb, ms=1600, periodic=False)
Timer.Alarm(handler=alarm_cb, ms=1700, periodic=False)
Timer.Alarm(handler=alarm_cb, ms=1800, periodic=False)
Timer.Alarm(handler=alarm_cb, ms=1900, periodic=False)
Timer.Alarm(handler=alarm_cb, ms=2000, periodic=False)
Timer.Alarm(handler=alarm_cb, ms=2100, periodic=False)

print("Starting while loop...")
timer = time.ticks_ms()
while time.ticks_ms() < timer + 3000: #runs for 3 seconds
    pass # If the tasks started by the Alarm callback are not terminated immediately the device would run out of memory
         # as the Idle Task does not have the chance to run and free the resources of the terminated tasks before new ones created
print("While loop has finished without exceptions!")
print("While loop has finished without exceptions!") | 42.647059 | 133 | 0.764828 | 230 | 1,450 | 4.730435 | 0.4 | 0.08364 | 0.1875 | 0.242647 | 0.418199 | 0.418199 | 0.394301 | 0.394301 | 0 | 0 | 0 | 0.042994 | 0.133793 | 1,450 | 34 | 134 | 42.647059 | 0.823248 | 0.26 | 0 | 0 | 0 | 0 | 0.081308 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.074074 | false | 0.037037 | 0.148148 | 0 | 0.259259 | 0.111111 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
c36fcf35f939313b2cb6860c2c84b0c98ef28383 | 719 | py | Python | server_startup.py | Legion-Engine/Hivemind | 6511aba3a421ef06208aaf956ab06c81214f3c13 | [
"MIT"
] | null | null | null | server_startup.py | Legion-Engine/Hivemind | 6511aba3a421ef06208aaf956ab06c81214f3c13 | [
"MIT"
] | null | null | null | server_startup.py | Legion-Engine/Hivemind | 6511aba3a421ef06208aaf956ab06c81214f3c13 | [
"MIT"
] | null | null | null | from common.alive_helper import AliveHelper
from server.middleware import Middleware
from server.modules.scheduler_module import SchedulerModule
from server.modules.storage_module import StorageModule
from server.net import Server
# check client_startup.py for how this work
# the server is basically the same, but with different layers
# technically a module could be compatible for both server and client
if __name__ == "__main__":
    # Assemble the middleware layer stack, then serve until an update cycle
    # reports completion (mirrors the setup described in client_startup.py).
    layer_stack = [
        SchedulerModule(),
        StorageModule(),
        AliveHelper(),
    ]
    pipeline = Middleware(modules=layer_stack)
    net_server = Server(middleware=pipeline)
    net_server.start()
    # Drive the middleware until update() signals shutdown, then stop serving.
    while pipeline.update():
        pass
    net_server.keep_alive = False
| 26.62963 | 69 | 0.739917 | 84 | 719 | 6.178571 | 0.559524 | 0.077071 | 0.065511 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.197497 | 719 | 26 | 70 | 27.653846 | 0.89948 | 0.235049 | 0 | 0 | 0 | 0 | 0.014652 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.066667 | 0.333333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 2 |
c3714281e6d767d47d51cf15ca3b55698cfd0d84 | 4,824 | py | Python | sheetwork/core/yaml/yaml_schema.py | bastienboutonnet/sheetwork | 7aa757ed12375ddd2c56502b721d91146d22b7ea | [
"MIT"
] | 9 | 2020-12-10T12:12:42.000Z | 2021-11-24T20:56:36.000Z | sheetwork/core/yaml/yaml_schema.py | bastienboutonnet/sheetwork | 7aa757ed12375ddd2c56502b721d91146d22b7ea | [
"MIT"
] | 266 | 2020-04-19T10:50:19.000Z | 2022-03-14T22:12:43.000Z | sheetwork/core/yaml/yaml_schema.py | bastienboutonnet/sheetwork | 7aa757ed12375ddd2c56502b721d91146d22b7ea | [
"MIT"
] | 3 | 2020-04-25T18:11:20.000Z | 2020-12-21T09:36:34.000Z | """Holds cerberus validation schemals for each yml parsed by any of the sheetwork system."""
config_schema = {
"sheets": {
"required": True,
"type": "list",
"schema": {
"type": "dict",
"schema": {
"sheet_name": {"required": True, "type": "string"},
"sheet_key": {"required": True, "type": "string"},
"worksheet": {"required": False, "type": "string"},
"target_schema": {"required": False, "type": "string"},
"target_table": {"required": True, "type": "string"},
"snake_case_camel": {"required": False, "type": "boolean"},
"columns": {
"type": "list",
"required": False,
"schema": {
"type": "dict",
"schema": {
"name": {
"required": True,
"type": "string",
"maxlength": 255,
},
"datatype": {
"required": True,
"type": "string",
"regex": "(?i)^(int|varchar|numeric|boolean|date|timestamp_ntz)$",
},
"identifier": {"required": False, "type": "string"},
},
},
},
"excluded_columns": {
"anyof_type": ["list", "string"],
"required": False,
"schema": {"type": "string"},
},
"included_columns": {
"anyof_type": ["list", "string"],
"required": False,
"schema": {"type": "string"},
},
"custom_column_name_cleanup": {
"type": "dict",
"required": False,
"schema": {
"default_replacement": {"type": "string", "required": False},
"characters_to_replace": {
"anyof_type": ["list", "string"],
"required": False,
"schema": {"type": "string"},
},
},
},
},
},
},
}
# Cerberus schema for profiles.yml: named profiles, each with a default target
# and a mapping of output names to warehouse connection credentials.
profiles_schema = {
    "profiles": {
        "required": True,
        "type": "dict",
        # valuesrules: validate every profile value (keys are free-form names).
        "valuesrules": {
            "type": "dict",
            "schema": {
                "target": {"required": True, "type": "string"},
                "outputs": {
                    "required": True,
                    "type": "dict",
                    "valuesrules": {
                        "type": "dict",
                        "schema": {
                            "db_type": {"required": True, "type": "string"},
                            "account": {"required": False, "type": "string"},
                            "user": {"required": True, "type": "string"},
                            "password": {"required": True, "type": "string"},
                            "host": {"required": False, "type": "string"},
                            "port": {"required": False, "type": "string"},
                            "role": {"required": False, "type": "string"},
                            "database": {"required": False, "type": "string"},
                            "warehouse": {"required": False, "type": "string"},
                            "schema": {"required": False, "type": "string"},
                            # ! new and preferred from v1.1.0
                            "target_schema": {"required": False, "type": "string"},
                            "guser": {"required": True, "type": "string"},
                            "is_service_account": {"required": False, "type": "boolean"},
                        },
                    },
                },
            },
        },
    }
}
# Cerberus schema for the project file: project name, object-creation flags,
# and optional directory overrides for profile/sheet-config locations.
project_schema = {
    "name": {"required": True, "type": "string"},
    "target_schema": {"required": False, "type": "string"},
    "always_create": {"required": False, "type": "boolean"},
    "always_create_table": {"required": False, "type": "boolean"},
    "always_create_schema": {"required": False, "type": "boolean"},
    "always_create_objects": {"required": False, "type": "boolean"},
    "destructive_create_table": {"required": False, "type": "boolean"},
    "paths": {
        "type": "dict",
        "required": False,
        "schema": {
            "profile_dir": {"required": False, "type": "string"},
            "sheet_config_dir": {"required": False, "type": "string"},
        },
    },
}
| 41.586207 | 98 | 0.370647 | 312 | 4,824 | 5.608974 | 0.282051 | 0.165714 | 0.204 | 0.184 | 0.431429 | 0.328571 | 0.193143 | 0.193143 | 0.090286 | 0.062857 | 0 | 0.002306 | 0.460614 | 4,824 | 115 | 99 | 41.947826 | 0.670254 | 0.024461 | 0 | 0.381818 | 0 | 0 | 0.311702 | 0.031064 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.009091 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
c3786846c9e879d1b368ae35f565af9c116bc6eb | 2,921 | py | Python | tests/test_log.py | vivialconnect/vivialconnect-python | a7deaae0ffd470c681810e6d23c90bd7a0bbde11 | [
"MIT"
] | 1 | 2017-03-06T05:46:31.000Z | 2017-03-06T05:46:31.000Z | tests/test_log.py | vivialconnect/vivialconnect-python | a7deaae0ffd470c681810e6d23c90bd7a0bbde11 | [
"MIT"
] | 3 | 2017-04-11T14:08:48.000Z | 2017-08-30T15:25:43.000Z | tests/test_log.py | vivialconnect/vivialconnect-python | a7deaae0ffd470c681810e6d23c90bd7a0bbde11 | [
"MIT"
] | 2 | 2017-05-04T19:26:38.000Z | 2017-06-21T19:39:07.000Z | import vivialconnect
from tests.common import BaseTestCase, HTTMock
class LogTest(BaseTestCase):
    """Tests for vivialconnect.Log listing and aggregation endpoints.

    Expected values mirror the canned responses under tests/fixtures/log/.
    """

    # Query window shared by every request in this suite.
    TIME_WINDOW = {"start_time": "20181101T145548Z", "end_time": "20181205T155548Z"}

    def _mock(self, fixture):
        """Return an HTTMock context serving *fixture* as a JSON response."""
        return HTTMock(
            self.response_content,
            body=self.load_fixture(fixture),
            # Fixed typo: the original header key was "Conent-type".
            headers={"Content-type": "application/json"},
        )

    def test_get_logs(self):
        """Plain listing returns a pagination key and a list of log items."""
        with self._mock("log/log"):
            last_key, logs = vivialconnect.Log.find(**self.TIME_WINDOW)
            self.assertNotEqual(last_key, "")
            # Check the amount of items
            self.assertIsInstance(logs, list)
            self.assertEqual(len(logs), 16)

    def test_get_aggregated_logs(self):
        """Aggregated listing returns log_items with the expected structure."""
        with self._mock("log/log_aggregated"):
            logs = vivialconnect.Log.get_aggregated_logs(**self.TIME_WINDOW)
            self.assertIn("log_items", logs)
            self.assertIn("last_key", logs)
            # Check for keys in a log item
            log_item = logs["log_items"][0]
            for key in (
                "account_id",
                "account_id_log_type",
                "log_timestamp",
                "aggregate_key",
                "log_count",
                "log_type",
            ):
                self.assertIn(key, log_item)
            # Check the amount of items
            self.assertEqual(len(logs["log_items"]), 7)

    def test_get_aggregated_logs_with_log_type(self):
        """Aggregation filtered by log_type returns only matching items."""
        with self._mock("log/log_aggregated_log_type"):
            params = dict(
                self.TIME_WINDOW,
                optional_query_parameters={"log_type": "user.login"},
            )
            logs = vivialconnect.Log.get_aggregated_logs(**params)
            self.assertIn("log_items", logs)
            self.assertIn("last_key", logs)
            log_item = logs["log_items"][0]
            # Check log item structure
            for key in (
                "account_id",
                "account_id_log_type",
                "log_timestamp",
                "aggregate_key",
                "log_count",
                "log_type",
            ):
                self.assertIn(key, log_item)
            # Check content. The original used assertTrue(expected, actual),
            # which ALWAYS passes (the second argument is just the failure
            # message); assertEqual performs the intended comparison. The
            # expected literals are the author's originals — confirm they match
            # the fixture's actual types/values.
            self.assertEqual("4", log_item["account_id"])
            self.assertEqual("4-user.login", log_item["account_id_log_type"])
            self.assertEqual("minutes", log_item["aggregate_key"])
            self.assertEqual(201811281404, log_item["log_timestamp"])
            self.assertEqual("user.login", log_item["log_type"])
            # Check the amount of items
            self.assertEqual(len(logs["log_items"]), 2)
| 41.728571 | 87 | 0.62102 | 336 | 2,921 | 5.142857 | 0.208333 | 0.085069 | 0.063657 | 0.109954 | 0.703125 | 0.677662 | 0.640046 | 0.640046 | 0.640046 | 0.640046 | 0 | 0.047641 | 0.252653 | 2,921 | 69 | 88 | 42.333333 | 0.74393 | 0.049983 | 0 | 0.576271 | 0 | 0 | 0.228685 | 0.018786 | 0 | 0 | 0 | 0 | 0.440678 | 1 | 0.050847 | false | 0 | 0.033898 | 0 | 0.101695 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
c37aca719ec5903d780a1008118d47c1e1ad6e7b | 1,608 | py | Python | libs/enums.py | paulirish/covid-data-model | b93ae5d598b8378f9c1f2698e3162f87136cde74 | [
"MIT"
] | 1 | 2020-04-25T02:51:26.000Z | 2020-04-25T02:51:26.000Z | libs/enums.py | paulirish/covid-data-model | b93ae5d598b8378f9c1f2698e3162f87136cde74 | [
"MIT"
] | null | null | null | libs/enums.py | paulirish/covid-data-model | b93ae5d598b8378f9c1f2698e3162f87136cde74 | [
"MIT"
] | null | null | null | import enum
# Sentinel FIPS code assigned wherever the real county FIPS is unknown.
# TODO: This should maybe be unique per state.
UNKNOWN_FIPS = "99999"
class Intervention(enum.Enum):
    """Mitigation level applied when running projections.

    Numeric values are fixed for consistency with the website and must not be
    renumbered (2 is reserved for OBSERVED_MITIGATION even though it is
    declared between 1 and 3).
    """

    NO_MITIGATION = 0
    HIGH_MITIGATION = 1  # on the webiste, strictDistancingNow
    MODERATE_MITIGATION = 3  # weak distancingNow on the website
    SELECTED_MITIGATION = 4  # look at what the state is and get the file for that
    # We are using enum 2 for consistency with the website
    OBSERVED_MITIGATION = 2  # given the previous pattern, how do we predict going forward

    @classmethod
    def county_supported_interventions(cls):
        """Return the interventions supported for county-level results."""
        return [
            cls.NO_MITIGATION,
            cls.HIGH_MITIGATION,
            cls.MODERATE_MITIGATION,
            cls.SELECTED_MITIGATION,
        ]

    @classmethod
    def from_webui_data_adaptor(cls, label):
        """Map a WebUI Data Adaptor suppression-policy label to an Intervention.

        Raises:
            ValueError: if *label* is not a known suppression policy.
            (ValueError subclasses Exception, so callers with the previous
            broad ``except Exception`` handling are unaffected.)
        """
        mapping = {
            "suppression_policy__no_intervention": cls.NO_MITIGATION,
            "suppression_policy__flatten_the_curve": cls.HIGH_MITIGATION,
            "suppression_policy__inferred": cls.OBSERVED_MITIGATION,
            "suppression_policy__social_distancing": cls.MODERATE_MITIGATION,
        }
        try:
            return mapping[label]
        except KeyError:
            raise ValueError(f"Unexpected WebUI Data Adaptor label: {label}") from None

    @classmethod
    def from_str(cls, label):
        """Map a distancing-policy string to an Intervention.

        Unknown labels fall back to NO_MITIGATION (original behavior).
        """
        return {
            "shelter_in_place": cls.HIGH_MITIGATION,
            "social_distancing": cls.MODERATE_MITIGATION,
        }.get(label, cls.NO_MITIGATION)
| 36.545455 | 89 | 0.683458 | 185 | 1,608 | 5.713514 | 0.481081 | 0.059603 | 0.083254 | 0.085147 | 0.226112 | 0.141911 | 0 | 0 | 0 | 0 | 0 | 0.009228 | 0.258706 | 1,608 | 43 | 90 | 37.395349 | 0.877517 | 0.202114 | 0 | 0.257143 | 0 | 0 | 0.171765 | 0.107451 | 0 | 0 | 0 | 0.023256 | 0 | 1 | 0.085714 | false | 0 | 0.028571 | 0.028571 | 0.514286 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
c3813a33a7840188b4b6d3b7a7e1db8f37392fef | 522 | py | Python | contrib/redis_count.py | hiidef/hiispider | ba2b55cc72261cd360935992ab2e8196ac6a1278 | [
"MIT"
] | 2 | 2015-08-30T02:38:09.000Z | 2019-11-12T20:52:41.000Z | contrib/redis_count.py | hiidef/hiispider | ba2b55cc72261cd360935992ab2e8196ac6a1278 | [
"MIT"
] | null | null | null | contrib/redis_count.py | hiidef/hiispider | ba2b55cc72261cd360935992ab2e8196ac6a1278 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# encoding: utf-8
import redis
# Count cached keys in the local Redis and report the split between the two
# negative caches and the remaining account-cache keys.
client = redis.Redis(host='127.0.0.1', port=6379, db=0)

negative_cache_count = len(client.keys('negative_cache:*'))
negative_req_cache_count = len(client.keys('negative_req_cache:*'))
total_key_count = len(client.keys('*'))
# Account-cache keys are whatever remains after removing both negative caches.
account_cache_count = (
    total_key_count - negative_cache_count - negative_req_cache_count
)
print('nc: %d, nrc: %d, ac: %d, t: %d' % (
    negative_cache_count,
    negative_req_cache_count,
    account_cache_count,
    total_key_count,
))
| 27.473684 | 65 | 0.714559 | 88 | 522 | 3.829545 | 0.352273 | 0.21365 | 0.178042 | 0.178042 | 0.62908 | 0.587537 | 0.439169 | 0.439169 | 0.255193 | 0 | 0 | 0.026608 | 0.136015 | 522 | 18 | 66 | 29 | 0.720621 | 0.068966 | 0 | 0 | 0 | 0 | 0.157025 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.071429 | 0 | 0.071429 | 0.071429 | 0 | 0 | 0 | null | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
5edf5b0c39c1fd35a59b8061621187ee9c8d9a77 | 1,344 | py | Python | questions/migrations/0005_auto_20210108_1820.py | Shirhussain/Find-my-match | f11f64ebb0f2145d8cd9a352bd2c29bf30c85088 | [
"MIT"
] | null | null | null | questions/migrations/0005_auto_20210108_1820.py | Shirhussain/Find-my-match | f11f64ebb0f2145d8cd9a352bd2c29bf30c85088 | [
"MIT"
] | null | null | null | questions/migrations/0005_auto_20210108_1820.py | Shirhussain/Find-my-match | f11f64ebb0f2145d8cd9a352bd2c29bf30c85088 | [
"MIT"
] | null | null | null | # Generated by Django 3.1.5 on 2021-01-08 18:20
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated (makemigrations): adds point fields to UserAnswer and
    # rewrites the two importance-choice fields.

    dependencies = [
        ('questions', '0004_auto_20210108_1325'),
    ]

    operations = [
        migrations.AddField(
            model_name='useranswer',
            name='my_points',
            field=models.IntegerField(default=-1, verbose_name='My points'),
        ),
        migrations.AddField(
            model_name='useranswer',
            name='their_points',
            # NOTE(review): verbose_name 'My points' on their_points looks like
            # a copy-paste slip (expected 'Their points'). Fix it in the model
            # and let Django generate a follow-up migration rather than editing
            # this historical one.
            field=models.IntegerField(default=-1, verbose_name='My points'),
        ),
        migrations.AlterField(
            model_name='useranswer',
            name='my_answer_importance',
            field=models.CharField(choices=[('Mandatory', 'Mandatory'), ('Very Important', 'very important'), ('Somewhat Important', 'somewhat important'), ('Not important', 'not important')], max_length=50, verbose_name='My Answer Importance'),
        ),
        migrations.AlterField(
            model_name='useranswer',
            name='their_answer_importance',
            # NOTE(review): "importants" in the verbose_name below is likely a
            # typo for "importance" — correct it in the model, not here.
            field=models.CharField(choices=[('Mandatory', 'Mandatory'), ('Very Important', 'very important'), ('Somewhat Important', 'somewhat important'), ('Not important', 'not important')], max_length=50, verbose_name='Their answer importants'),
        ),
    ]
| 39.529412 | 248 | 0.623512 | 133 | 1,344 | 6.157895 | 0.383459 | 0.03663 | 0.092796 | 0.112332 | 0.766789 | 0.749695 | 0.568987 | 0.568987 | 0.568987 | 0.568987 | 0 | 0.036133 | 0.238095 | 1,344 | 33 | 249 | 40.727273 | 0.763672 | 0.033482 | 0 | 0.518519 | 1 | 0 | 0.318427 | 0.035466 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.185185 | 0 | 0.296296 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
5ee759c1ab04fb4239ea54df8bd2baf2821c95df | 15,732 | py | Python | pysnmp-with-texts/RAPID-HA-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/RAPID-HA-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/RAPID-HA-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module RAPID-HA-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/RAPID-HA-MIB
# Produced by pysmi-0.3.4 at Wed May 1 14:51:59 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsUnion")
rapidstream, = mibBuilder.importSymbols("RAPID-MIB", "rapidstream")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
MibIdentifier, Bits, Integer32, Unsigned32, NotificationType, enterprises, Counter64, iso, Counter32, ModuleIdentity, TimeTicks, IpAddress, MibScalar, MibTable, MibTableRow, MibTableColumn, ObjectIdentity, Gauge32 = mibBuilder.importSymbols("SNMPv2-SMI", "MibIdentifier", "Bits", "Integer32", "Unsigned32", "NotificationType", "enterprises", "Counter64", "iso", "Counter32", "ModuleIdentity", "TimeTicks", "IpAddress", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ObjectIdentity", "Gauge32")
DateAndTime, TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "DateAndTime", "TextualConvention", "DisplayString")
# Auto-generated by pysmi from RAPID-HA-MIB — do not hand-edit; regenerate
# from the ASN.1 source instead.
# Module identity (enterprises.4355.6) plus revision/contact metadata.
rsInfoModule = ModuleIdentity((1, 3, 6, 1, 4, 1, 4355, 6))
rsInfoModule.setRevisions(('2002-11-01 12:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts: rsInfoModule.setRevisionsDescriptions(('Initial revision.',))
if mibBuilder.loadTexts: rsInfoModule.setLastUpdated('0211011200Z')
if mibBuilder.loadTexts: rsInfoModule.setOrganization('WatchGuard Technologies, Inc.')
if mibBuilder.loadTexts: rsInfoModule.setContactInfo(' Ella Yu WatchGuard Technologies, Inc. 1841 Zanker Road San Jose, CA 95112 USA 408-519-4888 ella.yu@watchguard.com ')
if mibBuilder.loadTexts: rsInfoModule.setDescription('The MIB module describes general information of RapidStream system. Mainly, the information obtained from this MIB is used by rsInfoSystemMIB, rsClientMIB, rsSystemStatisticsMIB, rsIpsecTunnelMIB, rsHAMIB.')
# Base OIDs: rsHAMIB (…4355.6.6) with the local-appliance (…6.6.1) and
# peer-appliance (…6.6.2) subtrees.
rsHAMIB = ObjectIdentity((1, 3, 6, 1, 4, 1, 4355, 6, 6))
if mibBuilder.loadTexts: rsHAMIB.setStatus('current')
if mibBuilder.loadTexts: rsHAMIB.setDescription('This is the base object identifier for all HA related branches.')
rsHALocal = ObjectIdentity((1, 3, 6, 1, 4, 1, 4355, 6, 6, 1))
if mibBuilder.loadTexts: rsHALocal.setStatus('current')
if mibBuilder.loadTexts: rsHALocal.setDescription('This is the base object identifier for all objects which are belong to local appliance.')
rsHAPeer = ObjectIdentity((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2))
if mibBuilder.loadTexts: rsHAPeer.setStatus('current')
if mibBuilder.loadTexts: rsHAPeer.setDescription('This is the base object identifier for all objects which are belong to peer appliance.')
# Read-only status/error scalars under the local-appliance subtree (…6.6.1).
rsHAStatus = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("disabled", 0), ("unknown", 1), ("as-primary-active", 2), ("as-secondary-active", 3), ("aa-primary-ative", 4), ("aa-secondary-active", 5), ("aa-primary-takeover", 6), ("aa-secondary-takeover", 7), ("standby", 8), ("admin", 9), ("failed", 10), ("unavailable", 11)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsHAStatus.setStatus('current')
if mibBuilder.loadTexts: rsHAStatus.setDescription("Indicates current status of local appliance. disabled: The local appliance of HA system is not enabled. unknown: The local appliance of HA system is in initialization as-primary-active: The local appliance that is the primary appliance of HA/AS system is in active mode. This status is also called MASTER in some systems. as-secondary-active: The local appliance that is the secondary appliance of HA/AS system is in active mode. This status is also called BACKUP in some systems. aa-primary-ative: The local appliance that is the primary appliance of HA/AA system is in active mode. aa-secondary-active: The local appliance that is the secondary appliance of HA/AA system is in active mode. aa-primary-takeover: The local appliance that is the primary appliance of HA/AA system has taken over the peer's duty. aa-secondary-takeover: The local appliance of the secondary appliance of HA/AA system has taken over the peer's duty. standby: The local appliance of HA/AS system is in standby mode. admin: The local appliance of HA system detects an mismatched configuration and waits for system administrator to reslove the conflict. failed: The local appliance of the HA system is down due to forced failover or other reasons. unavailable: It's reported when local appliance of HA system is unabled to get status information. ")
rsHAPeerStatus = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("unavailable", 0), ("active", 1), ("standby", 2), ("admin", 3), ("failed", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsHAPeerStatus.setStatus('current')
if mibBuilder.loadTexts: rsHAPeerStatus.setDescription("Indicates current status of peer appliance. unavailable: It's reported when peer appliance of HA system is unabled to get status information. active: The peer applicance of HA system is in active mode. standby: The peer applicance of HA system is in standby mode. admin: The peer applicance of HA system dectects an mismatched configuration and waits for system administrator to reslove the conflict. failed: The peer appliance of HA system is down due to forced failover or other reasons. ")
rsHALastDBSyncTime = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 1, 3), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsHALastDBSyncTime.setStatus('current')
if mibBuilder.loadTexts: rsHALastDBSyncTime.setDescription('The last DB synchronized time of local appliance.')
rsHAError = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))).clone(namedValues=NamedValues(("no-error", 0), ("mismatched-ha-id", 1), ("mismatched-software", 2), ("mismatched-database", 3), ("mismatched-hardware", 4), ("forced-fail", 5), ("invalid-ha-role", 6), ("link-down", 7), ("lost-mia-heartbeat", 8), ("mia-not-responding", 9), ("admin-command-failed", 10), ("detect-ha-error", 11), ("unavailable", 12), ("hotsync-failed", 13), ("config-sync-failed", 14)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsHAError.setStatus('current')
if mibBuilder.loadTexts: rsHAError.setDescription('Reports the current error that occurred in local appliance .')
rsHAPeerError = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))).clone(namedValues=NamedValues(("no-error", 0), ("mismatched-ha-id", 1), ("mismatched-software", 2), ("mismatched-database", 3), ("mismatched-hardware", 4), ("forced-fail", 5), ("invalid-ha-role", 6), ("link-down", 7), ("lost-mia-heartbeat", 8), ("mia-not-responding", 9), ("admin-command-failed", 10), ("detect-ha-error", 11), ("unavailable", 12), ("hotsync-failed", 13), ("config-sync-failed", 14)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsHAPeerError.setStatus('current')
if mibBuilder.loadTexts: rsHAPeerError.setDescription('Reports the current error that occurred in peer appliance.')
# Peer-appliance identity scalars and the device/counter sub-branches (…6.6.2).
rsHAPeerSerialNumber = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 1), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsHAPeerSerialNumber.setStatus('current')
if mibBuilder.loadTexts: rsHAPeerSerialNumber.setDescription('The serial number of peer appliance.')
rsHAPeerLastDBSyncTime = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 2), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsHAPeerLastDBSyncTime.setStatus('current')
if mibBuilder.loadTexts: rsHAPeerLastDBSyncTime.setDescription('The last DB synchronized time of peer appliance.')
rsHAPeerDevice = ObjectIdentity((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 3))
if mibBuilder.loadTexts: rsHAPeerDevice.setStatus('current')
if mibBuilder.loadTexts: rsHAPeerDevice.setDescription('This is the base object for parameters and configuration data of devices in this entity.')
rsHAPeerCounters = ObjectIdentity((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4))
if mibBuilder.loadTexts: rsHAPeerCounters.setStatus('current')
if mibBuilder.loadTexts: rsHAPeerCounters.setDescription('This is the base object for parameters and configuration data of devices in this entity.')
# Peer interface table (…6.6.2.3): row count plus per-interface index,
# IP address and link status columns, indexed by rsHAPeerIfIndex.
rsHAPeerIfNumber = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 3, 1), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsHAPeerIfNumber.setStatus('current')
if mibBuilder.loadTexts: rsHAPeerIfNumber.setDescription('The number of RapidCard installed in this entity.')
rsHAPeerIfTable = MibTable((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 3, 2), )
if mibBuilder.loadTexts: rsHAPeerIfTable.setStatus('current')
if mibBuilder.loadTexts: rsHAPeerIfTable.setDescription('A list of RapidCard entries. The number of entries is given by the value of rsHAPeerDeviceNumber.')
rsHAPeerIfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 3, 2, 1), ).setIndexNames((0, "RAPID-HA-MIB", "rsHAPeerIfIndex"))
if mibBuilder.loadTexts: rsHAPeerIfEntry.setStatus('current')
if mibBuilder.loadTexts: rsHAPeerIfEntry.setDescription('A RapidCard entry containing objects for a particular RapidCard.')
rsHAPeerIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 3, 2, 1, 1), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsHAPeerIfIndex.setStatus('current')
if mibBuilder.loadTexts: rsHAPeerIfIndex.setDescription('The unique value for each interface.')
rsHAPeerIfIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 3, 2, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsHAPeerIfIpAddr.setStatus('current')
if mibBuilder.loadTexts: rsHAPeerIfIpAddr.setDescription('The ip address of the interface.')
rsHAPeerIfLinkStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 3, 2, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("down", 0), ("up", 1), ("other", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsHAPeerIfLinkStatus.setStatus('current')
if mibBuilder.loadTexts: rsHAPeerIfLinkStatus.setDescription('The current state of the interface.')
# Peer system counters (…6.6.2.4): CPU utilization gauges and traffic /
# connection / tunnel Counter64 scalars, all read-only.
rsHAPeerSystemCpuUtil = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 1), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil.setStatus('current')
if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil.setDescription('The CPU utilization of the peer system in last 5 seconds.')
rsHAPeerSystemTotalSendBytes = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsHAPeerSystemTotalSendBytes.setStatus('current')
if mibBuilder.loadTexts: rsHAPeerSystemTotalSendBytes.setDescription('The total number of bytes sent since peer system is up.')
rsHAPeerSystemTotalRecvBytes = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsHAPeerSystemTotalRecvBytes.setStatus('current')
if mibBuilder.loadTexts: rsHAPeerSystemTotalRecvBytes.setDescription('The total number of bytes received since peer system is up.')
rsHAPeerSystemTotalSendPackets = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsHAPeerSystemTotalSendPackets.setStatus('current')
if mibBuilder.loadTexts: rsHAPeerSystemTotalSendPackets.setDescription('The total number of packets sent since peer system is up.')
rsHAPeerSystemTotalRecvPackets = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsHAPeerSystemTotalRecvPackets.setStatus('current')
if mibBuilder.loadTexts: rsHAPeerSystemTotalRecvPackets.setDescription('The total number of packets received since peer system is up.')
rsHAPeerSystemStreamReqTotal = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsHAPeerSystemStreamReqTotal.setStatus('current')
if mibBuilder.loadTexts: rsHAPeerSystemStreamReqTotal.setDescription('The total number of the connection requests since system is up.')
rsHAPeerSystemStreamReqDrop = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsHAPeerSystemStreamReqDrop.setStatus('current')
if mibBuilder.loadTexts: rsHAPeerSystemStreamReqDrop.setDescription('The total number of the connection requests being dropped since system is up.')
rsHAPeerSystemCurrIpsecTunnels = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsHAPeerSystemCurrIpsecTunnels.setStatus('current')
if mibBuilder.loadTexts: rsHAPeerSystemCurrIpsecTunnels.setDescription('The number of ipsec tunnels in the peer system currently.')
rsHAPeerSystemCpuUtil1 = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil1.setStatus('current')
if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil1.setDescription('The CPU utilization of the peer system in last 1 minute.')
rsHAPeerSystemCpuUtil5 = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 10), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil5.setStatus('current')
if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil5.setDescription('The CPU utilization of the peer system in last 5 minutes.')
rsHAPeerSystemCpuUtil15 = MibScalar((1, 3, 6, 1, 4, 1, 4355, 6, 6, 2, 4, 11), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil15.setStatus('current')
if mibBuilder.loadTexts: rsHAPeerSystemCpuUtil15.setDescription('The CPU utilization of the peer system in last 15 minutes.')
# Register every defined MIB object with the builder under "RAPID-HA-MIB".
mibBuilder.exportSymbols("RAPID-HA-MIB", rsHAPeerIfNumber=rsHAPeerIfNumber, rsHALocal=rsHALocal, rsHAPeerIfIndex=rsHAPeerIfIndex, rsHAError=rsHAError, rsHAPeerLastDBSyncTime=rsHAPeerLastDBSyncTime, rsInfoModule=rsInfoModule, rsHALastDBSyncTime=rsHALastDBSyncTime, rsHAPeerDevice=rsHAPeerDevice, rsHAPeerSystemStreamReqTotal=rsHAPeerSystemStreamReqTotal, rsHAPeerSystemCpuUtil1=rsHAPeerSystemCpuUtil1, rsHAPeerSystemCpuUtil5=rsHAPeerSystemCpuUtil5, rsHAPeerSystemCpuUtil=rsHAPeerSystemCpuUtil, rsHAPeerStatus=rsHAPeerStatus, rsHAPeer=rsHAPeer, rsHAPeerSystemCurrIpsecTunnels=rsHAPeerSystemCurrIpsecTunnels, rsHAMIB=rsHAMIB, rsHAPeerSystemTotalSendBytes=rsHAPeerSystemTotalSendBytes, rsHAPeerCounters=rsHAPeerCounters, rsHAPeerIfIpAddr=rsHAPeerIfIpAddr, rsHAPeerIfEntry=rsHAPeerIfEntry, rsHAStatus=rsHAStatus, rsHAPeerError=rsHAPeerError, rsHAPeerIfLinkStatus=rsHAPeerIfLinkStatus, PYSNMP_MODULE_ID=rsInfoModule, rsHAPeerSystemCpuUtil15=rsHAPeerSystemCpuUtil15, rsHAPeerSystemTotalRecvBytes=rsHAPeerSystemTotalRecvBytes, rsHAPeerSystemStreamReqDrop=rsHAPeerSystemStreamReqDrop, rsHAPeerSerialNumber=rsHAPeerSerialNumber, rsHAPeerSystemTotalSendPackets=rsHAPeerSystemTotalSendPackets, rsHAPeerSystemTotalRecvPackets=rsHAPeerSystemTotalRecvPackets, rsHAPeerIfTable=rsHAPeerIfTable)
| 140.464286 | 1,377 | 0.776824 | 1,948 | 15,732 | 6.272587 | 0.165298 | 0.061871 | 0.108274 | 0.009821 | 0.545135 | 0.409117 | 0.330387 | 0.303789 | 0.2758 | 0.27179 | 0 | 0.052072 | 0.096682 | 15,732 | 111 | 1,378 | 141.72973 | 0.807755 | 0.020214 | 0 | 0 | 0 | 0.038835 | 0.35099 | 0.009867 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.067961 | 0 | 0.067961 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
5ef55dd1f2b5c3ab7be825a034e64075276cdb95 | 707 | py | Python | proj3_otter/tests/q1c.py | ioneone/UCLA-CS-188-Data-Science-Fundamentals | 1bab9eb9ffe1c616ca807b02866056572dfadaa3 | [
"MIT"
] | 1 | 2021-01-05T04:26:41.000Z | 2021-01-05T04:26:41.000Z | proj3_otter/tests/q1c.py | ioneone/UCLA-CS-188-Data-Science-Fundamentals | 1bab9eb9ffe1c616ca807b02866056572dfadaa3 | [
"MIT"
] | null | null | null | proj3_otter/tests/q1c.py | ioneone/UCLA-CS-188-Data-Science-Fundamentals | 1bab9eb9ffe1c616ca807b02866056572dfadaa3 | [
"MIT"
] | 1 | 2020-12-12T07:11:31.000Z | 2020-12-12T07:11:31.000Z | test = {
'name': 'q1c',
'points': 3,
'suites': [
{
'cases': [
{
'code': r"""
>>> manhattan_taxi.shape
(82800, 9)
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum(manhattan_taxi['duration'])
54551565
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> manhattan_taxi.iloc[0,:]['duration']
981
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': '',
'teardown': '',
'type': 'doctest'
}
]
}
| 18.128205 | 50 | 0.328147 | 47 | 707 | 4.87234 | 0.617021 | 0.065502 | 0.222707 | 0.28821 | 0.235808 | 0.235808 | 0 | 0 | 0 | 0 | 0 | 0.054645 | 0.48232 | 707 | 38 | 51 | 18.605263 | 0.571038 | 0 | 0 | 0.315789 | 0 | 0 | 0.454031 | 0.094767 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
5efa075b6de3b91a3fcf2deb550e094edfc0ed97 | 21,926 | py | Python | Chess_Bot/cogs/Misc.py | phuang1024/Chess_Bot | dec0746c2db2bd6f8cc83671345fb1331344c863 | [
"MIT"
] | 2 | 2021-05-07T16:30:25.000Z | 2021-05-10T16:02:48.000Z | Chess_Bot/cogs/Misc.py | phuang1024/Chess_Bot | dec0746c2db2bd6f8cc83671345fb1331344c863 | [
"MIT"
] | 3 | 2021-05-07T14:08:11.000Z | 2021-10-21T03:29:56.000Z | Chess_Bot/cogs/Misc.py | phuang1024/Chess_Bot | dec0746c2db2bd6f8cc83671345fb1331344c863 | [
"MIT"
] | 2 | 2021-08-24T03:25:17.000Z | 2022-01-27T06:08:38.000Z | import discord
from discord.ext import commands
from discord_slash import SlashContext
from discord_slash import cog_ext
from discord_slash.model import SlashCommandOptionType
from discord_slash.utils.manage_commands import create_option
import time
import typing
import logging
import sys
import Chess_Bot.util.Utility as util
import Chess_Bot.util.Data as data
from Chess_Bot.cogs.Profiles import Profile
from Chess_Bot.cogs import Profiles as profiles
from Chess_Bot import constants
version = '3.2.1'
class CachedUsernames:
    """Small TTL cache of Discord usernames, keyed by user id.

    Avoids hitting the ``fetch_user`` API for every leaderboard row by
    remembering each resolved name for ``constants.CACHE_REFRESH_TIME``
    seconds.
    """

    def __init__(self, client):
        self.client = client
        # id -> (display name, absolute expiry timestamp)
        self.cache = {}

    async def get_username(self, id):
        """Return the username for *id*, truncated to 30 chars, from cache
        when the cached entry has not yet expired."""
        entry = self.cache.get(id)
        if entry is not None and entry[1] >= time.time():
            return entry[0]
        # Cache miss or stale entry: fetch, truncate, and re-cache.
        name = str(await self.client.fetch_user(id))
        if len(name) > 30:
            name = name[:27] + '...'
        self.cache[id] = (name, time.time() + constants.CACHE_REFRESH_TIME)
        return name
class Misc(commands.Cog):
    """Miscellaneous informational commands for the chess bot.

    Most commands come in pairs: a classic prefix command and a
    slash-command twin (underscore-prefixed) registered via discord_slash.
    The prefix commands carry JSON docstrings that are consumed by the
    bot's help system, so that JSON must stay parseable.
    """

    def __init__(self, client):
        self.client = client
        # Recorded once at cog load; used by botinfo to report uptime.
        self.start_time = time.time()
        # Username lookups are cached to limit fetch_user API calls.
        self.cache = CachedUsernames(client)

    @commands.Cog.listener()
    async def on_ready(self):
        # Set the "Playing chess" presence and announce the restart in the
        # status channel, unless running with the -beta command-line flag.
        game = discord.Game("chess")
        await self.client.change_presence(activity=game)
        status_channel = await self.client.fetch_channel(constants.STATUS_CHANNEL_ID)
        if not '-beta' in sys.argv:
            await status_channel.send(f'Chess Bot has just restarted. Version: {version}')
        else:
            logging.info('Using beta version.')
        logging.info(f'Bot is ready. Logged in as {self.client.user}')

    @commands.command()
    @commands.cooldown(1, 3, commands.BucketType.user)
    async def ping(self, ctx):
        '''
        {
            "name": "ping",
            "description": "Sends the bot's latency.\\nAccording to the discord.py docs:\\nMeasures latency between a `HEARTBEAT` and a `HEARTBEAT_ACK` in seconds.\\nThis could be referred to as the Discord WebSocket protocol latency.",
            "usage": "$ping",
            "examples": [
                "$ping"
            ],
            "cooldown": 3
        }
        '''
        await ctx.send(f'Pong!\nLatency: {round(self.client.latency * 1000, 3)}ms')

    # Slash-command twin of ping.
    @cog_ext.cog_slash(name='ping', description='Is the bot online?')
    async def _ping(self, ctx: SlashContext):
        await ctx.send(f'Pong!\nLatency: {round(self.client.latency * 1000, 3)}ms')

    @commands.command()
    @commands.cooldown(1, 3, commands.BucketType.user)
    async def rating(self, ctx, person: typing.Union[discord.User, discord.Member] = None):
        '''
        {
            "name": "rating",
            "description": "Tells you a person's rating.\\nIf no person is specified, it defaults to your rating.",
            "usage": "$rating [user]",
            "examples": [
                "$rating",
                "$rating @person"
            ],
            "cooldown": 3
        }
        '''
        if person is None:
            person = ctx.author
        result = data.data_manager.get_rating(person.id)
        if result == None:
            # Second-person phrasing when the caller asked about themselves.
            person_str = 'You are' if person == ctx.author else f'{person} is'
            await ctx.send(f'{person_str} unrated.')
        else:
            person_str = 'Your' if person == ctx.author else f'{person}\''
            await ctx.send(f'{person_str} rating is {round(result, 2)}')

    # Slash-command twin of rating.
    @cog_ext.cog_slash(name='rating', description='Find out the rating of some user.', options=[
        create_option(name='person', description='The user who you want to find the rating of.',
                      option_type=SlashCommandOptionType.USER, required=False)
    ])
    async def _rating(self, ctx: SlashContext, person: typing.Union[discord.User, discord.Member] = None):
        if person is None:
            person = ctx.author
        result = data.data_manager.get_rating(person.id)
        if result == None:
            person_str = 'You are' if person == ctx.author else f'{person} is'
            await ctx.send(f'{person_str} unrated.')
        else:
            person_str = 'Your' if person == ctx.author else f'{person}\''
            await ctx.send(f'{person_str} rating is {round(result, 2)}')

    @commands.command(aliases=['top'])
    @commands.cooldown(1, 7, commands.BucketType.user)
    async def leaderboard(self, ctx, num='-1'):
        '''
        {
            "name": "leaderboard",
            "description": "Sends a list of [number] highest rated players.\\nIf a number is not specified, it will default to 10.\\nYou can also enter \\"all\\" for all rated players, or \\"bots\\" for all bots.\\nRight now, the leaderboard can hold a maximum of 40 people.",
            "aliases": [
                "top"
            ],
            "usage": "$leaderboard [number]",
            "examples": [
                "$leaderboard",
                "$leaderboard 13",
                "$leaderboard all",
                "$leaderboard bots"
            ],
            "cooldown": 7
        }
        '''
        embed = discord.Embed(title="Leaderboard", color=0xffb521)
        ratings = data.data_manager.get_ratings()
        all_players = []  # list of (id, rating) tuples to display
        if num == 'bots' or num == 'bot':
            # Bots-only board: every Profile enum member is a built-in bot.
            for bot in Profile:
                all_players.append((bot.value, ratings[bot.value]))
            all_players.sort(reverse=True, key=lambda a: a[1])
            embed.set_footer(text='Top rated bots')
        else:
            # Resolve the requested board size from the raw string argument.
            number = constants.DEFAULT_LEADERBOARD_SIZE
            if num == 'all' or num == 'max':
                number = min(constants.MAX_LEADERBOARD_SIZE,
                             len(ratings.keys()))
            elif num == '-1':
                number = min(constants.DEFAULT_LEADERBOARD_SIZE,
                             len(ratings.keys()))
            else:
                try:
                    number = int(num)
                    assert(1 <= number <= constants.MAX_LEADERBOARD_SIZE)
                except (ValueError, AssertionError):
                    await ctx.send(f'Please enter an integer from 1 to {constants.MAX_LEADERBOARD_SIZE}.')
                    return
            if number > len(ratings.keys()):
                await ctx.send('There aren\'t even that many rated players.')
                return
            if number > constants.MAX_LEADERBOARD_SIZE:
                await ctx.send(f'The leaderboard can hold a max of {constants.MAX_LEADERBOARD_SIZE} people.')
                return
            embed.set_footer(text=f'Top {number} rated players')
            for k in ratings.keys():
                if k in constants.LEADERBOARD_IGNORE:
                    continue
                all_players.append((k, ratings[k]))
            all_players.sort(reverse=True, key=lambda a: a[1])
            all_players = all_players[:number]
        # Build (rank, name, rating) rows; ids below len(Profile) are bots.
        rows = []
        for i, person in enumerate(all_players):
            if person[0] < len(Profile):
                rows.append(
                    (i + 1, profiles.get_name(person[0]), round(person[1], 2)))
            else:
                rows.append((i + 1, await self.cache.get_username(person[0]), round(person[1], 2)))
        # Right-align each column by padding to the widest entry.
        length1 = 0
        length2 = 0
        length3 = 0
        for i in rows:
            length1 = max(length1, len(str(i[0])))
            length2 = max(length2, len(str(i[1])))
            length3 = max(length3, len(str(i[2])))
        for ind, i in enumerate(rows):
            rows[ind] = (' ' * (length1 - len(str(i[0]))) + str(i[0]), ' ' *
                         (length2 - len(i[1])) + i[1], ' ' * (length3 - len(str(i[2]))) + str(i[2]))
        embed.description = '```\n' + \
            '\n'.join([f'{i[0]}: {i[1]} ({i[2]})' for i in rows]) + '\n```'
        embed.set_thumbnail(url=constants.AVATAR_URL)
        await ctx.send(embed=embed)

    # Slash-command twin of leaderboard. Maps the (number, bots) options
    # onto the same string-based dispatch the prefix command uses.
    @cog_ext.cog_slash(name='leaderboard', description='Sends a list of top rated players', options=[
        create_option(name='number', description='The number of players to include. You can also use "max" for the maximum number.',
                      option_type=SlashCommandOptionType.STRING, required=False),
        create_option(name='bots', description='Whether you want to include only bots or not.',
                      option_type=SlashCommandOptionType.BOOLEAN, required=False)
    ])
    async def _leaderboard(self, ctx: SlashContext, number='-1', bots=False):
        if number is None:
            number = '-1'
        if bots is not None and bots:
            number = 'bots'
        embed = discord.Embed(title="Leaderboard", color=0xffb521)
        ratings = data.data_manager.get_ratings()
        all_players = []
        if number == 'bots' or number == 'bot':
            for bot in Profile:
                all_players.append((bot.value, ratings[bot.value]))
            all_players.sort(reverse=True, key=lambda a: a[1])
            embed.set_footer(text='Top rated bots')
        else:
            if number == 'all' or number == 'max':
                number = min(constants.MAX_LEADERBOARD_SIZE,
                             len(ratings.keys()))
            elif number == '-1':
                number = min(constants.DEFAULT_LEADERBOARD_SIZE,
                             len(ratings.keys()))
            else:
                try:
                    number = int(number)
                    assert(1 <= number <= constants.MAX_LEADERBOARD_SIZE)
                except (ValueError, AssertionError):
                    await ctx.send(f'Please enter an integer from 1 to {constants.MAX_LEADERBOARD_SIZE}.')
                    return
            if number > len(ratings.keys()):
                await ctx.send('There aren\'t even that many rated players.')
                return
            if number > constants.MAX_LEADERBOARD_SIZE:
                await ctx.send(f'The leaderboard can hold a max of {constants.MAX_LEADERBOARD_SIZE} people.')
                return
            embed.set_footer(text=f'Top {number} rated players')
            for k in ratings.keys():
                if k in constants.LEADERBOARD_IGNORE:
                    continue
                all_players.append((k, ratings[k]))
            all_players.sort(reverse=True, key=lambda a: a[1])
            all_players = all_players[:number]
        rows = []
        for i, person in enumerate(all_players):
            if person[0] < len(Profile):
                rows.append(
                    (i + 1, profiles.get_name(person[0]), round(person[1], 2)))
            else:
                rows.append((i + 1, await self.cache.get_username(person[0]), round(person[1], 2)))
        length1 = 0
        length2 = 0
        length3 = 0
        for i in rows:
            length1 = max(length1, len(str(i[0])))
            length2 = max(length2, len(str(i[1])))
            length3 = max(length3, len(str(i[2])))
        for ind, i in enumerate(rows):
            rows[ind] = (' ' * (length1 - len(str(i[0]))) + str(i[0]), ' ' *
                         (length2 - len(i[1])) + i[1], ' ' * (length3 - len(str(i[2]))) + str(i[2]))
        embed.description = '```\n' + \
            '\n'.join([f'{i[0]}: {i[1]} ({i[2]})' for i in rows]) + '\n```'
        embed.set_thumbnail(url=constants.AVATAR_URL)
        await ctx.send(embed=embed)

    @commands.command()
    @commands.cooldown(1, 7, commands.BucketType.user)
    async def rank(self, ctx):
        '''
        {
            "name": "rank",
            "description": "Tells you your rank among all rated players.",
            "usage": "$rank",
            "examples": [
                "$rank"
            ],
            "cooldown": 7
        }
        '''
        if data.data_manager.get_rating(ctx.author.id) is None:
            await ctx.send('You are unrated.')
            return
        ratings = data.data_manager.get_ratings()
        all_players = []
        for k in ratings.keys():
            if k in constants.LEADERBOARD_IGNORE:
                continue
            all_players.append((k, ratings[k]))
        all_players.sort(reverse=True, key=lambda a: a[1])
        # Linear scan of the sorted list to find the caller's position.
        rank = None
        for i in range(len(all_players)):
            if all_players[i][0] == ctx.author.id:
                rank = i + 1
                break
        await ctx.send(f'Your rating is {round(data.data_manager.get_rating(ctx.author.id), 2)}. You are ranked {rank} out of {len(all_players)} players.')

    # Slash-command twin of rank.
    @cog_ext.cog_slash(name='rank', description='Shows your rank among rated players.')
    async def _rank(self, ctx: SlashContext):
        if data.data_manager.get_rating(ctx.author.id) is None:
            await ctx.send('You are unrated.')
            return
        ratings = data.data_manager.get_ratings()
        all_players = []
        for k in ratings.keys():
            if k in constants.LEADERBOARD_IGNORE:
                continue
            all_players.append((k, ratings[k]))
        all_players.sort(reverse=True, key=lambda a: a[1])
        rank = None
        for i in range(len(all_players)):
            if all_players[i][0] == ctx.author.id:
                rank = i + 1
                break
        await ctx.send(f'Your rating is {round(data.data_manager.get_rating(ctx.author.id), 2)}. You are ranked {rank} out of {len(all_players)} players.')

    @commands.command(aliases=['info'])
    @commands.cooldown(1, 3, commands.BucketType.user)
    async def botinfo(self, ctx):
        '''
        {
            "name": "botinfo",
            "description": "Sends some info and stats about the bot.\\nUse `$help` for a list of commands.",
            "aliases": [
                "info"
            ],
            "usage": "$botinfo",
            "examples": [
                "$botinfo"
            ],
            "cooldown": 3
        }
        '''
        embed = discord.Embed(title="Bot Info", color=0xff0000)
        embed.add_field(name="Links",
                        value=f"[Github]({constants.GITHUB_LINK}) | [Invite]({constants.INVITE_LINK}) | [Join the discord server]({constants.SUPPORT_SERVER_INVITE}) | [Top.gg]({constants.TOPGG_LINK})",
                        inline=False)
        embed.add_field(name='Version', value=version, inline=True)
        embed.add_field(name="Info",
                        value='Chess Bot is a bot that plays chess. Use `$help` for a list of commands.', inline=False)
        # Sum of member counts across all guilds (members in several guilds
        # are counted once per guild).
        users = 0
        for guild in self.client.guilds:
            users += guild.member_count
        embed.add_field(name="Stats", value="Stats", inline=False)
        embed.add_field(name="Server Count", value=str(
            len(self.client.guilds)), inline=True)
        embed.add_field(name="Member Count", value=str(users), inline=True)
        embed.add_field(
            name="Up time", value=f'{util.pretty_time(time.time() - self.start_time)}', inline=True)
        embed.add_field(name='Games in progress', value=str(
            len(data.data_manager.get_games())), inline=True)
        embed.add_field(name='Games finished', value=str(
            data.data_manager.total_games()), inline=True)
        owner = (await self.client.application_info()).owner
        embed.set_footer(text=f"Made by {owner}", icon_url=owner.avatar_url)
        embed.set_thumbnail(url=constants.AVATAR_URL)
        await ctx.send(embed=embed)

    # Slash-command twin of botinfo.
    @cog_ext.cog_slash(name='botinfo', description='Info and stats about the bot.')
    async def _botinfo(self, ctx: SlashContext):
        embed = discord.Embed(title="Bot Info", color=0xff0000)
        embed.add_field(name="Links",
                        value=f"[Github]({constants.GITHUB_LINK}) | [Invite]({constants.INVITE_LINK}) | [Join the discord server]({constants.SUPPORT_SERVER_INVITE}) | [Top.gg]({constants.TOPGG_LINK})",
                        inline=False)
        embed.add_field(name='Version', value=version, inline=True)
        embed.add_field(name="Info",
                        value='Chess Bot is a bot that plays chess. Use `$help` for a list of commands.', inline=False)
        users = 0
        for guild in self.client.guilds:
            users += guild.member_count
        embed.add_field(name="Stats", value="Stats", inline=False)
        embed.add_field(name="Server Count", value=str(
            len(self.client.guilds)), inline=True)
        embed.add_field(name="Member Count", value=str(users), inline=True)
        embed.add_field(
            name="Up time", value=f'{util.pretty_time(time.time() - self.start_time)}', inline=True)
        embed.add_field(name='Games in progress', value=str(
            len(data.data_manager.get_games())), inline=True)
        embed.add_field(name='Games finished', value=str(
            data.data_manager.total_games()), inline=True)
        owner = (await self.client.application_info()).owner
        embed.set_footer(text=f"Made by {owner}", icon_url=owner.avatar_url)
        embed.set_thumbnail(url=constants.AVATAR_URL)
        await ctx.send(embed=embed)

    @commands.command()
    @commands.cooldown(1, 1, commands.BucketType.user)
    async def invite(self, ctx):
        '''
        {
            "name": "invite",
            "description": "Sends an invite link for adding the bot to a server.",
            "usage": "$invite",
            "examples": [
                "$invite"
            ],
            "cooldown": 1
        }
        '''
        await ctx.send(constants.INVITE_LINK)

    # Slash-command twin of invite.
    @cog_ext.cog_slash(name='invite', description='Sends an invite link.')
    async def _invite(self, ctx: SlashContext):
        await ctx.send(constants.INVITE_LINK)

    @commands.command()
    @commands.cooldown(1, 3, commands.BucketType.user)
    async def stats(self, ctx, person: typing.Union[discord.Member, discord.User] = None):
        """
        {
            "name": "stats",
            "description": "Sends stats about the person.",
            "usage": "$stats [person]",
            "examples": [
                "$stats",
                "$stats @person"
            ],
            "cooldown": 3
        }
        """
        if person is None:
            person = ctx.author
        lost, won, drew = data.data_manager.get_stats(person.id)
        embed = discord.Embed(title=f'{person}\'s stats', color=0xfc26e0)
        embed.add_field(name='Rating', value=str(
            data.data_manager.get_rating(person.id)), inline=False)
        embed.add_field(name='Games Played', value=str(
            lost+won+drew), inline=False)
        embed.add_field(name='Lost', value=str(lost))
        embed.add_field(name='Won', value=str(won))
        embed.add_field(name='Drawn', value=str(drew))
        await ctx.send(embed=embed)

    # Slash-command twin of stats.
    @cog_ext.cog_slash(name='stats', description='Basic stats about a user', options=[
        create_option(name='person', description='The person you want to see the stats of.',
                      option_type=SlashCommandOptionType.USER, required=False)
    ])
    async def _stats(self, ctx, person=None):
        if person is None:
            person = ctx.author
        lost, won, drew = data.data_manager.get_stats(person.id)
        embed = discord.Embed(title=f'{person}\'s stats', color=0xfc26e0)
        embed.add_field(name='Rating', value=str(
            data.data_manager.get_rating(person.id)), inline=False)
        embed.add_field(name='Games Played', value=str(
            lost+won+drew), inline=False)
        embed.add_field(name='Lost', value=str(lost))
        embed.add_field(name='Won', value=str(won))
        embed.add_field(name='Drawn', value=str(drew))
        await ctx.send(embed=embed)

    @commands.command()
    @commands.cooldown(1, 3, commands.BucketType.user)
    async def notif(self, ctx):
        '''
        {
            "name": "notif",
            "description": "Sets the channel for your notifications.",
            "usage": "$notif",
            "examples": [
                "$notif"
            ],
            "cooldown": 3
        }
        '''
        data.data_manager.change_settings(
            ctx.author.id, new_notif=ctx.channel.id)
        await ctx.send(f'Notification channel set to `{ctx.channel.name if ctx.guild is not None else "DM channel"}`')

    # Slash-command variant of notif with view/test/set sub-actions.
    @cog_ext.cog_slash(name='notif', description='Sets your default channel for recieving notifications.', options=[
        create_option(name='type', description='View your notification channel, Test a notification, or Set your default channel.',
                      option_type=SlashCommandOptionType.STRING, required=True, choices=['view', 'test', 'set'])
    ])
    async def _notif(self, ctx, type):
        # The Util cog owns notification-channel lookup and delivery.
        util2 = self.client.get_cog('Util')
        if type == 'view':
            channel = await util2.get_notifchannel(ctx.author.id)
            await ctx.send(f'Your notification channel is `{channel}`.')
        elif type == 'set':
            data.data_manager.change_settings(
                ctx.author.id, new_notif=ctx.channel.id)
            await ctx.send(f'Notification channel set to `{ctx.channel.name if ctx.guild is not None else "DM channel"}`.')
        else:
            # 'test' (the only remaining choice): send a test notification.
            await ctx.send(f'You should recieve a test notification. If you do not, try changing your notification channel or changing your settings.')
            await util2.send_notif(ctx.author.id, 'This is a test notification.')
def setup(bot):
    """discord.py extension entry point: attach the Misc cog to *bot*."""
    bot.add_cog(Misc(bot))
| 41.059925 | 277 | 0.555505 | 2,600 | 21,926 | 4.588846 | 0.113077 | 0.018775 | 0.028162 | 0.039896 | 0.725505 | 0.68695 | 0.667756 | 0.655352 | 0.633392 | 0.612857 | 0 | 0.011703 | 0.318024 | 21,926 | 533 | 278 | 41.136961 | 0.786197 | 0 | 0 | 0.727528 | 0 | 0.02809 | 0.168863 | 0.033589 | 0 | 0 | 0.002592 | 0 | 0.011236 | 1 | 0.008427 | false | 0 | 0.042135 | 0 | 0.08427 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
6f0f9ff94aa346ecd62aff006786e0048b36ed7c | 2,154 | py | Python | esppy/windows/__init__.py | PetreStegaroiu/python-esppy | d43781e94ad9236916901eeb3737d0b1b18d797a | [
"Apache-2.0"
] | null | null | null | esppy/windows/__init__.py | PetreStegaroiu/python-esppy | d43781e94ad9236916901eeb3737d0b1b18d797a | [
"Apache-2.0"
] | null | null | null | esppy/windows/__init__.py | PetreStegaroiu/python-esppy | d43781e94ad9236916901eeb3737d0b1b18d797a | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# encoding: utf-8
#
# Copyright SAS Institute
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function, division, absolute_import, unicode_literals
from .base import BaseWindow, Window, get_window_class, Target
from .subscriber import Subscriber
from .publisher import Publisher
from .aggregate import AggregateWindow
from .calculate import CalculateWindow
from .copy import CopyWindow
from .compute import ComputeWindow
from .counter import CounterWindow
from .filter import FilterWindow
from .functional import FunctionalWindow
from .geofence import GeofenceWindow
from .join import JoinWindow
from .modelsuper import ModelSupervisorWindow
from .modelreader import ModelReaderWindow
from .notification import NotificationWindow
from .objectTracker import ObjectTrackerWindow
from .pattern import PatternWindow
from .procedural import ProceduralWindow
from .removeState import RemoveStateWindow
from .score import ScoreWindow
from .source import SourceWindow
from .textcategory import TextCategoryWindow
from .textcontext import TextContextWindow
from .textsentiment import TextSentimentWindow
from .texttopic import TextTopicWindow
from .train import TrainWindow
from .transpose import TransposeWindow
from .union import UnionWindow
from .pythonmas import PythonHelper
def get_subclasses(cls):
    """Yield every direct and indirect subclass of *cls*.

    Traversal is depth-first and post-order: a class's own descendants are
    yielded before the class itself, matching the recursive original.
    """
    # Explicit stack of (class, expanded) pairs; a node is yielded only on
    # its second visit, after its whole subtree has been emitted.
    stack = [(child, False) for child in reversed(cls.__subclasses__())]
    while stack:
        node, expanded = stack.pop()
        if expanded:
            yield node
        else:
            stack.append((node, True))
            stack.extend(
                (child, False) for child in reversed(node.__subclasses__()))
# Register every concrete, non-hidden window subclass under a
# 'window-<type>' key; presumably this is the lookup table used by
# get_window_class when resolving window types by name — confirm.
for cls in get_subclasses(BaseWindow):
    if cls.window_type and not cls.is_hidden:
        BaseWindow.window_classes['window-%s' % cls.window_type] = cls
| 34.741935 | 82 | 0.803157 | 270 | 2,154 | 6.333333 | 0.548148 | 0.035088 | 0.015205 | 0.018713 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002723 | 0.147632 | 2,154 | 61 | 83 | 35.311475 | 0.928649 | 0.273445 | 0 | 0 | 0 | 0 | 0.005814 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.026316 | false | 0 | 0.789474 | 0 | 0.815789 | 0.026316 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
6f1b0c8a419cb3a9f5dac53f419b72f27b767fc3 | 412 | py | Python | utils.py | anunaym14/api | 3a4dfbbe4287da3129f0adcacead353cad91c199 | [
"Apache-2.0"
] | null | null | null | utils.py | anunaym14/api | 3a4dfbbe4287da3129f0adcacead353cad91c199 | [
"Apache-2.0"
] | null | null | null | utils.py | anunaym14/api | 3a4dfbbe4287da3129f0adcacead353cad91c199 | [
"Apache-2.0"
] | null | null | null | def get_date_from_zip(zip_name: str) -> str:
"""
Helper function to parse a date from a ROM zip's name
"""
return zip_name.split("-")[-1].split(".")[0]
def get_metadata_from_zip(zip_name: str) -> "tuple[str, str, str, str]":
    """
    Parse metadata fields from a ROM zip's name.

    The ".zip" suffix is stripped and the remainder split on "-"; the
    2nd through 5th fields are returned as a 4-tuple.  Raises IndexError
    when the name has fewer than five "-"-separated fields.

    Note: the original return annotation was the tuple object
    ``(str, str, str, str)``, which is not a valid type annotation; it is
    now a proper (string-form, version-agnostic) tuple type.
    """
    data = zip_name.replace(".zip", "").split("-")
    return data[1], data[2], data[3], data[4]
| 29.428571 | 65 | 0.597087 | 66 | 412 | 3.575758 | 0.378788 | 0.127119 | 0.114407 | 0.118644 | 0.372881 | 0.372881 | 0 | 0 | 0 | 0 | 0 | 0.018692 | 0.220874 | 412 | 13 | 66 | 31.692308 | 0.716511 | 0.262136 | 0 | 0 | 0 | 0 | 0.026022 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.4 | false | 0 | 0 | 0 | 0.8 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
6f1efefcd4c95c82f2d97f7681c5aa5a604d40bd | 319 | py | Python | gdxcompare/profiles/art6.py | jackjackk/gdxcompare | 53ac2b5e0f20e9b466384b16253ef93f1669f65a | [
"MIT"
] | 2 | 2017-04-27T08:42:49.000Z | 2021-05-27T19:58:11.000Z | gdxcompare/profiles/art6.py | jackjackk/gdxcompare | 53ac2b5e0f20e9b466384b16253ef93f1669f65a | [
"MIT"
] | 4 | 2016-12-14T08:58:08.000Z | 2017-07-07T15:26:27.000Z | gdxcompare/profiles/art6.py | jackjackk/gdxcompare | 53ac2b5e0f20e9b466384b16253ef93f1669f65a | [
"MIT"
] | 1 | 2017-07-07T12:37:28.000Z | 2017-07-07T12:37:28.000Z | filt_dict = {
'db': [['Emissions', '(Kyoto Gases|co2)$', '(|Energy and Industrial Processes|AFOLU)$', '', '(world|r5.*)'],
['Policy cost', '(Additional Total Energy System Cost|consumption Loss|gdp Loss)', '', '', '(world|r5.*)'],
['Price', 'Carbon', '$', '', '(world|r5.*)'],
],
}
| 45.571429 | 118 | 0.504702 | 31 | 319 | 5.16129 | 0.774194 | 0.13125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.016 | 0.216301 | 319 | 6 | 119 | 53.166667 | 0.624 | 0 | 0 | 0 | 0 | 0 | 0.601881 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
6f2561d49af321b885511719b42848651485ec82 | 1,621 | py | Python | app/core/tests/test_models.py | Mounika-dev/clever-recipes | 171ca2f5e2b02d10cf403ffab66f71cf5b0c200d | [
"MIT"
] | null | null | null | app/core/tests/test_models.py | Mounika-dev/clever-recipes | 171ca2f5e2b02d10cf403ffab66f71cf5b0c200d | [
"MIT"
] | null | null | null | app/core/tests/test_models.py | Mounika-dev/clever-recipes | 171ca2f5e2b02d10cf403ffab66f71cf5b0c200d | [
"MIT"
] | null | null | null | from django.test import TestCase
from django.contrib.auth import get_user_model
from core import models
def sample_user(email='test@hello.com', password='pass1234'):
    """Create and return a sample user for use in model tests."""
    return get_user_model().objects.create_user(email, password)
class ModelTests(TestCase):
    """Unit tests for the custom user model and the Tag model."""

    def test_create_user_with_email_successful(self):
        """Creating a new user with an email and password succeeds."""
        email = 'hello@people.com'
        password = 'PasstheTest'
        user = get_user_model().objects.create_user(email, password)
        self.assertEqual(user.email, email)
        # check_password compares against the stored (hashed) password.
        self.assertTrue(user.check_password(password))

    def test_new_user_email_normalized(self):
        """A new user's email address is normalized on creation."""
        email = 'test@HELLO.COM'
        user = get_user_model().objects.create_user(email, 'test562')
        # The local part is already lowercase here, so full lower() matches
        # the domain-only normalization the manager performs.
        self.assertEqual(user.email, email.lower())

    def test_new_user_email_validity(self):
        """Creating a user without an email raises ValueError."""
        with self.assertRaises(ValueError):
            get_user_model().objects.create_user(None, 'testpw')

    def test_create_new_superuser(self):
        """A superuser is flagged both is_superuser and is_staff."""
        user = get_user_model().objects.create_superuser('sup@user.com', 'pas')
        self.assertTrue(user.is_superuser)
        self.assertTrue(user.is_staff)

    def test_tag_str(self):
        """A Tag's string representation is its name."""
        tag = models.Tag.objects.create(
            user=sample_user(),
            name='vegan'
        )
        self.assertEqual(str(tag), tag.name)
| 34.489362 | 79 | 0.669957 | 213 | 1,621 | 4.910798 | 0.300469 | 0.068834 | 0.068834 | 0.090822 | 0.305927 | 0.214149 | 0.120459 | 0.120459 | 0 | 0 | 0 | 0.005521 | 0.217767 | 1,621 | 46 | 80 | 35.23913 | 0.819401 | 0.148674 | 0 | 0 | 0 | 0 | 0.071217 | 0 | 0 | 0 | 0 | 0 | 0.241379 | 1 | 0.206897 | false | 0.172414 | 0.103448 | 0 | 0.37931 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
6f4e174aadbc413d3c6649a3174e0bb72c79595d | 1,133 | py | Python | tools/invest/test_invest_globio_demo.py | mlacayoemery/esws | 1af5e99b8651edbc36588adcec969032081d6a77 | [
"MIT"
] | null | null | null | tools/invest/test_invest_globio_demo.py | mlacayoemery/esws | 1af5e99b8651edbc36588adcec969032081d6a77 | [
"MIT"
] | 22 | 2019-08-08T13:14:54.000Z | 2022-02-10T10:43:38.000Z | tools/invest/test_invest_globio_demo.py | mlacayoemery/esws | 1af5e99b8651edbc36588adcec969032081d6a77 | [
"MIT"
] | null | null | null | """"
This is a saved model run from natcap.invest.globio.
Generated: 11/06/17 11:05:24
InVEST version: 3.3.3
"""
import natcap.invest.globio
import os
# Model arguments captured from an InVEST GUI run (GLOBIO model).
# Path values may begin with "~"; they are expanded to the user's home
# directory in the __main__ block before execution.
args = {
        u'aoi_uri': u'~/workspace/data/globio/sub_aoi.shp',
        u'infrastructure_dir': u'~/workspace/data/globio/infrastructure_dir',
        u'intensification_fraction': u'0.46',
        u'lulc_to_globio_table_uri': u'~/workspace/data/globio/lulc_conversion_table.csv',
        u'lulc_uri': u'~/workspace/data/globio/lulc_2008.tif',
        u'msa_parameters_uri': u'~/workspace/data/globio/msa_parameters.csv',
        u'pasture_threshold': u'0.5',
        u'pasture_uri': u'~/workspace/data/globio/pasture.tif',
        u'potential_vegetation_uri': u'~/workspace/data/globio/potential_vegetation.tif',
        u'predefined_globio': False,
        u'primary_threshold': u'0.66',
        u'workspace_dir': u'C:/Users/lacayoem/Documents/globio_workspace',
}
if __name__ == '__main__':
    # Expand any leading "~" in string-valued args to the home directory.
    for k in args.keys():
        try:
            args[k] = os.path.expanduser(args[k])
        except AttributeError:
            # Non-string values (e.g. predefined_globio=False) are skipped.
            # NOTE(review): on Python 3, expanduser(False) raises TypeError
            # rather than AttributeError — confirm the target interpreter.
            continue
    natcap.invest.globio.execute(args)
| 33.323529 | 90 | 0.664607 | 159 | 1,133 | 4.528302 | 0.440252 | 0.111111 | 0.136111 | 0.194444 | 0.202778 | 0.075 | 0 | 0 | 0 | 0 | 0 | 0.029348 | 0.187996 | 1,133 | 33 | 91 | 34.333333 | 0.753261 | 0.092674 | 0 | 0 | 1 | 0 | 0.537708 | 0.395691 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.086957 | 0 | 0.086957 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
6f5ed72c2e3e1d22e676fed35e61498c23601d4b | 589 | py | Python | app/models/verify.py | acdzh/EmbroideryColor-Backend | 4aef3d3600312b76c80249fe2a348a0d5ed535ac | [
"WTFPL"
] | null | null | null | app/models/verify.py | acdzh/EmbroideryColor-Backend | 4aef3d3600312b76c80249fe2a348a0d5ed535ac | [
"WTFPL"
] | null | null | null | app/models/verify.py | acdzh/EmbroideryColor-Backend | 4aef3d3600312b76c80249fe2a348a0d5ed535ac | [
"WTFPL"
] | null | null | null | from app import db
class Verify(db.Model):
    """ORM model for a verification record: a numbered URL owned by a user.

    ``uid`` is the primary key, so there is at most one record per user.
    """

    __tablename__ = 'verifies'
    num = db.Column(db.Integer, nullable=False)
    url = db.Column(db.String(20), nullable=False)
    uid = db.Column(db.Integer, nullable=False, primary_key=True)

    def __init__(self, _num, _url, _uid):
        self.num = _num
        self.url = _url
        self.uid = _uid

    def __repr__(self):
        # Bug fix: __repr__ must return a str.  The original returned the
        # dict from json(), which makes repr()/str() raise TypeError.
        return '<Verify num=%r url=%r uid=%r>' % (self.num, self.url, self.uid)

    def json(self):
        """Return a JSON-serializable dict view of this record."""
        return {
            'num': self.num,
            'url': self.url,
            'uid': self.uid
        }

    def get_uid(self):
        """Return the owning user's id (the primary key)."""
        return self.uid
| 22.653846 | 65 | 0.563667 | 76 | 589 | 4.105263 | 0.368421 | 0.076923 | 0.096154 | 0.108974 | 0.192308 | 0.192308 | 0 | 0 | 0 | 0 | 0 | 0.004938 | 0.312394 | 589 | 25 | 66 | 23.56 | 0.765432 | 0 | 0 | 0 | 0 | 0 | 0.028862 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0.05 | 0.15 | 0.65 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 2 |
6f637bd62982fcdd86e03e6715e68d37cd558b5d | 7,177 | py | Python | tests/test_tree.py | ml-lib/CodeLib | 33b5e9fb1be0ab9df86b3877bd6b84c2d0c34f78 | [
"BSD-3-Clause"
] | 2 | 2022-01-20T14:44:37.000Z | 2022-01-24T06:19:48.000Z | tests/test_tree.py | ml-lib/CodeLib | 33b5e9fb1be0ab9df86b3877bd6b84c2d0c34f78 | [
"BSD-3-Clause"
] | 16 | 2022-01-20T09:25:52.000Z | 2022-01-31T18:27:47.000Z | tests/test_tree.py | ml-lib/CodeLib | 33b5e9fb1be0ab9df86b3877bd6b84c2d0c34f78 | [
"BSD-3-Clause"
] | 1 | 2022-01-24T08:41:23.000Z | 2022-01-24T08:41:23.000Z | """
Test suite module for ``XGBoost``.
Credits
-------
::
Authors:
- Diptesh
- Madhu
Date: Sep 27, 2021
"""
# pylint: disable=invalid-name
# pylint: disable=wrong-import-position
import unittest
import warnings
import re
import sys
from inspect import getsourcefile
from os.path import abspath
import pandas as pd
from sklearn.model_selection import train_test_split as split
from sklearn import metrics as sk_metrics
# Set base path
path = abspath(getsourcefile(lambda: 0))
path = re.sub(r"(.+)(\/tests.*)", "\\1", path)
sys.path.insert(0, path)
from mllib.lib.tree import RandomForest # noqa: F841
from mllib.lib.tree import XGBoost # noqa: F841
# =============================================================================
# --- DO NOT CHANGE ANYTHING FROM HERE
# =============================================================================
path = path + "/data/input/"
# =============================================================================
# --- User defined functions
# =============================================================================
def ignore_warnings(test_func):
    """Decorator that runs *test_func* with all warnings suppressed.

    Fix: the wrapper now carries the wrapped function's metadata
    (``__name__``, ``__doc__``) via ``functools.wraps``, so decorated
    test methods still report their own names in test output.
    """
    import functools

    @functools.wraps(test_func)
    def do_test(self, *args, **kwargs):
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            test_func(self, *args, **kwargs)
    return do_test
class Test_RandomForest(unittest.TestCase):
    """Test suite for module ``RandomForest``."""

    def setUp(self):
        """Set up for module ``RandomForest``."""

    def test_rf_class(self):
        """RandomForest: Test for classification."""
        x_var = ["x1", "x2", "x3", "x4"]
        y_var = "y"
        df_ip = pd.read_csv(path + "iris.csv")
        df_ip = df_ip[[y_var] + x_var]
        # Stratified 80/20 split keeps class proportions in both halves.
        df_train, df_test = split(df_ip,
                                  stratify=df_ip[y_var],
                                  test_size=0.2,
                                  random_state=42)
        mod = RandomForest(df_train, y_var, x_var, method="classify")
        y_hat = mod.predict(df_test[x_var])[y_var].tolist()
        y = df_test[y_var].values.tolist()
        acc = round(sk_metrics.accuracy_score(y, y_hat), 2)
        # Expect at least 93% accuracy on the held-out set.
        self.assertGreaterEqual(acc, 0.93)

    def test_rf_reg(self):
        """RandomForest: Test for regression."""
        x_var = ["x1", "x2", "x3", "x4"]
        y_var = "y"
        df_ip = pd.read_csv(path + "iris.csv")
        df_ip = df_ip[[y_var] + x_var]
        df_train, df_test = split(df_ip,
                                  stratify=df_ip[y_var],
                                  test_size=0.2,
                                  random_state=42)
        mod = RandomForest(df_train, y_var, x_var, method="regression")
        y_hat = mod.predict(df_test[x_var])[y_var].tolist()
        y = df_test[y_var].values.tolist()
        mse = round(sk_metrics.mean_squared_error(y, y_hat), 2)
        self.assertLessEqual(mse, 0.1)

    def test_rf_ts_exog(self):
        """RandomForest: Test for time series with exogenous variables"""
        x_var = ["cost"]
        y_var = "y"
        test_perc = 0.2
        df_ip = pd.read_excel(path + "test_time_series.xlsx",
                              sheet_name="exog")
        df_ip = df_ip.set_index("ts")
        # Chronological split: first 80% to train, final 20% to test.
        df_train = df_ip.iloc[0:int(len(df_ip) * (1-test_perc)), :]
        df_test = df_ip.iloc[int(len(df_ip) * (1-test_perc)): len(df_ip), :]
        df_test = df_test[x_var]
        mod = RandomForest(df_train, y_var, x_var, method="timeseries")
        mod.predict(df_test)
        metrics = mod.model_summary
        self.assertGreaterEqual(metrics["rsq"], 0.8)
        self.assertLessEqual(metrics["mape"], 0.5)

    def test_rf_ts_endog(self):
        """RandomForest: Test for time series with endogenous variable"""
        y_var = "y"
        df_ip = pd.read_excel(path + "test_time_series.xlsx",
                              sheet_name="exog")
        df_ip = df_ip.set_index("ts")
        # No exogenous variables: the model forecasts from history alone.
        mod = RandomForest(df_ip, y_var, method="timeseries")
        mod.predict()
        metrics = mod.model_summary
        self.assertGreaterEqual(metrics["rsq"], 0.7)
        self.assertLessEqual(metrics["mape"], 0.7)
class Test_XGBoost(unittest.TestCase):
    """Test suite for module ``XGBoost``."""

    def setUp(self):
        """Set up for module ``XGBoost``."""

    def test_xgboost_class(self):
        """XGBoost: Test for classification."""
        predictors = ["x1", "x2"]
        target = "y"
        data = pd.read_csv(path + "iris.csv")[[target] + predictors]
        train_df, test_df = split(data,
                                  stratify=data[target],
                                  test_size=0.2,
                                  random_state=1)
        model = XGBoost(train_df, target, predictors, method="classify")
        predicted = model.predict(test_df[predictors])[target].tolist()
        observed = test_df[target].values.tolist()
        accuracy = round(sk_metrics.accuracy_score(observed, predicted), 2)
        self.assertGreaterEqual(accuracy, 0.93)

    def test_xgboost_reg(self):
        """XGBoost: Test for regression."""
        predictors = ["x1", "x2", "x3", "x4"]
        target = "y"
        data = pd.read_csv(path + "iris.csv")[[target] + predictors]
        train_df, test_df = split(data,
                                  stratify=data[target],
                                  test_size=0.2,
                                  random_state=1)
        model = XGBoost(train_df, target, predictors, method="regression")
        predicted = model.predict(test_df[predictors])[target].tolist()
        observed = test_df[target].values.tolist()
        error = round(sk_metrics.mean_squared_error(observed, predicted), 2)
        self.assertLessEqual(error, 0.5)

    def test_xgboost_ts_exog(self):
        """XGBoost: Test for time series with exogenous variables"""
        predictors = ["cost"]
        target = "y"
        holdout = 0.2
        data = pd.read_excel(path + "test_time_series.xlsx",
                             sheet_name="exog").set_index("ts")
        cut = int(len(data) * (1 - holdout))
        train_df = data.iloc[0:cut, :]
        test_df = data.iloc[cut:len(data), :][predictors]
        model = XGBoost(train_df, target, predictors, method="timeseries")
        model.predict(test_df)
        summary = model.model_summary
        self.assertAlmostEqual(1.0, summary["rsq"], places=1)
        self.assertLessEqual(summary["mape"], 0.1)

    def test_xgboost_ts_endog(self):
        """XGBoost: Test for time series with endogenous variable"""
        target = "y"
        data = pd.read_excel(path + "test_time_series.xlsx",
                             sheet_name="exog").set_index("ts")
        model = XGBoost(data, target, method="timeseries")
        model.predict()
        summary = model.model_summary
        self.assertAlmostEqual(1.0, summary["rsq"], places=1)
        self.assertLessEqual(summary["mape"], 0.1)
# =============================================================================
# --- Main
# =============================================================================
# Discover and run every TestCase in this file when executed as a script.
if __name__ == '__main__':
    unittest.main()
| 35.35468 | 79 | 0.532534 | 909 | 7,177 | 3.951595 | 0.163916 | 0.048998 | 0.016704 | 0.022272 | 0.735802 | 0.706292 | 0.687361 | 0.643931 | 0.643931 | 0.622494 | 0 | 0.016349 | 0.275603 | 7,177 | 202 | 80 | 35.529703 | 0.674553 | 0.181134 | 0 | 0.639098 | 0 | 0 | 0.05733 | 0.014505 | 0 | 0 | 0 | 0 | 0.090226 | 1 | 0.090226 | false | 0 | 0.082707 | 0 | 0.195489 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
6f6621fab68647066da29a64130e78fa4cc37cda | 369 | bzl | Python | third_party/dotnet/nuget/packages/system.threading.tasks.extensions.bzl | GQAssurance/selenium | fc93242e17385966cd2ad9088e1044ed6e8bf148 | [
"Apache-2.0"
] | 2 | 2021-06-28T15:02:22.000Z | 2021-07-06T11:57:51.000Z | third_party/dotnet/nuget/packages/system.threading.tasks.extensions.bzl | GQAssurance/selenium | fc93242e17385966cd2ad9088e1044ed6e8bf148 | [
"Apache-2.0"
] | 4 | 2020-07-17T08:44:45.000Z | 2021-05-09T06:12:03.000Z | third_party/dotnet/nuget/packages/system.threading.tasks.extensions.bzl | lucianodgs/selenium | e2b2b97de0d9a6cc86563c866a9361237519159f | [
"Apache-2.0"
] | 2 | 2019-06-27T19:40:39.000Z | 2019-11-25T23:36:37.000Z | package(default_visibility = [ "//visibility:public" ])
load("@io_bazel_rules_dotnet//dotnet:defs.bzl", "net_import_library", "core_import_library")
net_import_library(
name = "net45",
src = "lib/netstandard1.0/System.Threading.Tasks.Extensions.dll",
)
core_import_library(
name = "netcore",
src = "lib/netstandard2.0/System.Threading.Tasks.Extensions.dll"
)
| 28.384615 | 92 | 0.750678 | 47 | 369 | 5.638298 | 0.595745 | 0.196226 | 0.120755 | 0.158491 | 0.256604 | 0.256604 | 0 | 0 | 0 | 0 | 0 | 0.01791 | 0.092141 | 369 | 12 | 93 | 30.75 | 0.773134 | 0 | 0 | 0 | 0 | 0 | 0.593496 | 0.409214 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.3 | 0 | 0.3 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
6f71c8b94b3b3021b4bbe64d8495234d0f7f285b | 3,139 | py | Python | deepensemble/combiner/modelcombiner.py | pdoren/correntropy-and-ensembles-in-deep-learning | b8e39e0ea97395e9f4ef5e9b351551a89fedc885 | [
"MIT"
] | 1 | 2017-11-22T15:35:45.000Z | 2017-11-22T15:35:45.000Z | deepensemble/combiner/modelcombiner.py | pdoren/correntropy-and-ensembles-in-deep-learning | b8e39e0ea97395e9f4ef5e9b351551a89fedc885 | [
"MIT"
] | null | null | null | deepensemble/combiner/modelcombiner.py | pdoren/correntropy-and-ensembles-in-deep-learning | b8e39e0ea97395e9f4ef5e9b351551a89fedc885 | [
"MIT"
] | 1 | 2021-12-14T04:16:57.000Z | 2021-12-14T04:16:57.000Z | from collections import OrderedDict
from ..utils.serializable import Serializable
__all__ = ['ModelCombiner']
class ModelCombiner(Serializable):
    """Abstract base for strategies that merge the outputs of an ensemble.

    Attributes
    ----------
    _param : dict
        Internal parameter record of the combiner
        (keys: name, value, shape, init, include).
    _type_model : str
        Either ``"regressor"`` or ``"classifier"``.

    Parameters
    ----------
    param : dict
        Parameters of the combiner method (unused by this base class).
    type_model : str
        Type of model: ``"regressor"`` or ``"classifier"``.
    """

    # noinspection PyUnusedLocal
    def __init__(self, param=None, type_model="regressor"):
        super(ModelCombiner, self).__init__()
        self._param = {'name': 'Combiner', 'value': None, 'shape': None, 'init': False, 'include': False}
        self._type_model = type_model

    def get_type_model(self):
        """Return the model-type string: "regressor" or "classifier"."""
        return self._type_model

    def get_param(self, only_values=False):
        """Return the parameter record, or only its 'value' entry.

        Parameters
        ----------
        only_values : bool
            When True, return ``_param['value']`` instead of the whole dict.
        """
        return self._param['value'] if only_values else self._param

    def output(self, ensemble_model, _input, prob):
        """Mix the outputs of the ensemble's models (abstract).

        Parameters
        ----------
        ensemble_model : EnsembleModel
            Ensemble whose member models are combined.
        _input : theano.tensor.matrix or numpy.array
            Input sample.
        prob : bool
            For classifiers: True yields probabilities, False the translated
            output (the translation is non-differentiable, so training uses True).

        Raises
        ------
        NotImplementedError
            Always; concrete combiners must override this method.
        """
        raise NotImplementedError

    def predict(self, ensemble_model, _input):
        """Evaluate the combined, non-probabilistic output as a numpy array."""
        combined = self.output(ensemble_model, _input, prob=False)
        return combined.eval()

    def update_parameters(self, ensemble_model, _input, _target):
        """Hook for updating internal parameters; this base class does nothing.

        Returns
        -------
        OrderedDict or None
            Subclasses return parameter-update expressions; the base returns None.
        """
        return None
| 27.060345 | 117 | 0.590953 | 324 | 3,139 | 5.580247 | 0.320988 | 0.071903 | 0.049779 | 0.033186 | 0.308628 | 0.279867 | 0.199668 | 0.199668 | 0.199668 | 0.109513 | 0 | 0 | 0.319529 | 3,139 | 115 | 118 | 27.295652 | 0.846442 | 0.539344 | 0 | 0 | 0 | 0 | 0.062048 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.285714 | false | 0 | 0.095238 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
488c2eea163933cca572d28c0faeac94ebd610f9 | 69 | py | Python | config.py | marmelab/snake_solver_one | 4d17174ed4bc57e8d7ecd72ce508d0ff80b6349f | [
"MIT"
] | 3 | 2016-08-17T21:23:45.000Z | 2021-02-08T11:45:21.000Z | config.py | marmelab/snake_solver_one | 4d17174ed4bc57e8d7ecd72ce508d0ff80b6349f | [
"MIT"
] | 2 | 2016-06-08T09:55:03.000Z | 2016-08-16T07:04:19.000Z | config.py | marmelab/snake_solver_one | 4d17174ed4bc57e8d7ecd72ce508d0ff80b6349f | [
"MIT"
] | 1 | 2018-11-29T10:49:28.000Z | 2018-11-29T10:49:28.000Z | WIDTH = 50
HEIGHT = 10
MAX_WIDTH = WIDTH - 2
MAX_HEIGHT = HEIGHT - 2
| 13.8 | 23 | 0.681159 | 12 | 69 | 3.75 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.113208 | 0.231884 | 69 | 4 | 24 | 17.25 | 0.735849 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
48a6f47b5b29f6490f83299bbeef5b7c1a4bbac3 | 883 | py | Python | Products/SimpleUserFolder/tests/security.py | Simplistix/SimpleUserFolder | a5e5cb655080037c68a2c9d16435aca8050b4623 | [
"MIT"
] | 1 | 2018-05-17T15:38:53.000Z | 2018-05-17T15:38:53.000Z | Products/SimpleUserFolder/tests/security.py | Simplistix/SimpleUserFolder | a5e5cb655080037c68a2c9d16435aca8050b4623 | [
"MIT"
] | 1 | 2019-04-03T12:44:29.000Z | 2020-02-13T09:46:52.000Z | Products/SimpleUserFolder/tests/security.py | Simplistix/SimpleUserFolder | a5e5cb655080037c68a2c9d16435aca8050b4623 | [
"MIT"
] | 3 | 2019-05-10T07:18:14.000Z | 2021-03-08T13:12:00.000Z | from Acquisition import Implicit
class PermissiveSecurityPolicy:
    """
    Unit-testing security policy: every validation passes, and every
    permission is granted except the literal 'forbidden permission'.
    """

    #
    # Standard SecurityPolicy interface
    #
    def validate( self
                , accessed=None
                , container=None
                , name=None
                , value=None
                , context=None
                , roles=None
                , *args
                , **kw):
        # Accept any access unconditionally.
        return 1

    def checkPermission( self, permission, object, context) :
        # Only the magic test permission is ever denied.
        return 0 if permission == 'forbidden permission' else 1
class OmnipotentUser( Implicit ):
    """
    All-powerful user stub for unit testing: fixed id, access to everything.
    """

    def getId( self ):
        # Fixed, recognisable test identity.
        return 'all_powerful_Oz'

    # The user-folder API expects both accessors; they are identical here.
    getUserName = getId

    def allowed( self, object, object_roles=None ):
        # Grant access regardless of the object or its roles.
        return 1
| 23.864865 | 66 | 0.536806 | 77 | 883 | 6.116883 | 0.597403 | 0.044586 | 0.059448 | 0.093418 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007435 | 0.390713 | 883 | 36 | 67 | 24.527778 | 0.86803 | 0.156285 | 0 | 0.136364 | 0 | 0 | 0.049575 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.181818 | false | 0 | 0.045455 | 0.136364 | 0.590909 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 2 |
48a9e505a19ae2e5c71b0586672a9d3cf611621e | 2,157 | py | Python | vector3d/point.py | jayman39tx/vector3d | 6ea8a759b5102b65aea33173da1ed78cde9a67b0 | [
"MIT"
] | null | null | null | vector3d/point.py | jayman39tx/vector3d | 6ea8a759b5102b65aea33173da1ed78cde9a67b0 | [
"MIT"
] | null | null | null | vector3d/point.py | jayman39tx/vector3d | 6ea8a759b5102b65aea33173da1ed78cde9a67b0 | [
"MIT"
] | null | null | null | from math import sqrt
class Point:
    """A mutable point in 3-D Cartesian space with Java-style accessors."""

    # Class-level defaults kept for backward compatibility with code reading
    # Point.x/.y/.z on the class; instances always shadow them in __init__.
    x = float()
    y = float()
    z = float()

    def __init__(self, x=0, y=0, z=0):
        """Create a point at (x, y, z); all coordinates default to 0."""
        self.x = x
        self.y = y
        self.z = z

    def __eq__(self, other_point):
        """Return True when *other_point* is a Point at the same coordinates.

        Bug fix: the original evaluated ``other_point.x/.y/.z`` before using
        the isinstance result, so comparing against a non-Point object raised
        AttributeError instead of returning False.
        """
        if not isinstance(other_point, Point):
            return False
        return (self.x == other_point.x
                and self.y == other_point.y
                and self.z == other_point.z)

    def __ne__(self, other_point):
        """Exact negation of __eq__ (the original duplicated the logic)."""
        return not self.__eq__(other_point)

    def clone(self):
        """Return a new Point with the same coordinates."""
        return(Point(self.x, self.y, self.z))

    def equals(self, other_point):
        """Alias for ``==`` kept for API compatibility."""
        return(self == other_point)

    def getX(self):
        """Return the x coordinate."""
        return(self.x)

    def getY(self):
        """Return the y coordinate."""
        return(self.y)

    def getZ(self):
        """Return the z coordinate."""
        return(self.z)

    def getLocation(self):
        """Return a new Point copy of this location."""
        return(Point(self.x, self.y, self.z))

    def getLocationTuple(self):
        """Return the location as an (x, y, z) tuple."""
        return((self.x, self.y, self.z))

    def move(self, x=0, y=0, z=0):
        """Set the location; unspecified coordinates reset to 0."""
        self.x = x
        self.y = y
        self.z = z

    def setLocation(self, x=0, y=0, z=0):
        """Same contract as move(): unspecified coordinates reset to 0."""
        self.x = x
        self.y = y
        self.z = z

    def setLocationFromPoint(self, other_point):
        """Copy the coordinates of *other_point* into this point."""
        self.x = other_point.x
        self.y = other_point.y
        self.z = other_point.z

    def toString(self):
        """Return a human-readable form, e.g. ``Point: 1, 2, 3``."""
        return('Point: %(point_x)s, %(point_y)s, %(point_z)s' %{'point_x': self.x, "point_y": self.y, "point_z": self.z})

    def translate(self, x=0, y=0, z=0):
        """Shift the point by the given deltas."""
        self.x += x
        self.y += y
        self.z += z
def distance(a, b):
    """Return the Euclidean distance between points *a* and *b*.

    Bug fix: the original used ``^`` (bitwise XOR) instead of ``**``
    (exponentiation), giving wrong results on ints and a TypeError on floats.
    """
    return sqrt((b.x - a.x) ** 2 + (b.y - a.y) ** 2 + (b.z - a.z) ** 2)
def center(a, b):
    """Return the midpoint of the segment from *a* to *b* as a new Point."""
    mid_x = (b.x - a.x)/2 + a.x
    mid_y = (b.y - a.y)/2 + a.y
    mid_z = (b.z - a.z)/2 + a.z
    return Point(mid_x, mid_y, mid_z)
| 26.9625 | 122 | 0.528048 | 355 | 2,157 | 3.022535 | 0.135211 | 0.074557 | 0.044734 | 0.026095 | 0.542404 | 0.469711 | 0.469711 | 0.452936 | 0.452936 | 0.452936 | 0 | 0.012405 | 0.327306 | 2,157 | 79 | 123 | 27.303797 | 0.727085 | 0.034307 | 0 | 0.316667 | 0 | 0 | 0.032468 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.283333 | false | 0 | 0.016667 | 0.166667 | 0.4 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
48adfce99cd7ee7036b0bdf962880fbb939b1471 | 113 | py | Python | python/1078.py | LourdesOshiroIgarashi/uri-begginner | 05f7993dfde2c7cc49e5b74907dee6297c82f447 | [
"MIT"
] | 3 | 2021-05-17T05:39:08.000Z | 2021-05-23T05:14:54.000Z | python/1078.py | LourdesOshiroIgarashi/uri-beginner | 05f7993dfde2c7cc49e5b74907dee6297c82f447 | [
"MIT"
] | null | null | null | python/1078.py | LourdesOshiroIgarashi/uri-beginner | 05f7993dfde2c7cc49e5b74907dee6297c82f447 | [
"MIT"
] | null | null | null | x = int(input())
for i in range(1, 11):
resultado = i * x
print("{} x {} = {}".format(i, x, resultado))
| 18.833333 | 49 | 0.504425 | 18 | 113 | 3.166667 | 0.666667 | 0.070175 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.035714 | 0.256637 | 113 | 5 | 50 | 22.6 | 0.642857 | 0 | 0 | 0 | 0 | 0 | 0.106195 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.25 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
48b8d3bbfd55b045e569b9c6b6ff0dfa25ab5c18 | 981 | py | Python | Card.py | vitalij-gotovskij/pocker | 7ba9db47e8fbe6c0f41c64b84fb379452a41eedb | [
"MIT"
] | null | null | null | Card.py | vitalij-gotovskij/pocker | 7ba9db47e8fbe6c0f41c64b84fb379452a41eedb | [
"MIT"
] | null | null | null | Card.py | vitalij-gotovskij/pocker | 7ba9db47e8fbe6c0f41c64b84fb379452a41eedb | [
"MIT"
] | 8 | 2022-03-17T18:37:43.000Z | 2022-03-28T13:34:08.000Z | from enum import Enum, auto, unique
class Suit(Enum):
    """Card suit; each member's value is its Unicode suit symbol."""
    HEARTS = "♥"
    DIAMONDS = "♦"
    CLUBS = "♣"
    SPADES = "♠"
class CardRank(Enum):
    """Card rank ordered from TWO (lowest) to ACE (highest).

    NOTE: ``auto()`` starts at 1, so TWO.value == 1, THREE.value == 2, ...
    ACE.value == 13 — the values do not match the printed rank names;
    presumably only their relative order is relied upon for comparisons.
    """
    TWO = auto()
    THREE = auto()
    FOUR = auto()
    FIVE = auto()
    SIX = auto()
    SEVEN = auto()
    EIGHT = auto()
    NINE = auto()
    TEN = auto()
    JACK = auto()
    QUEEN = auto()
    KING = auto()
    ACE = auto()
class Card():
    """A playing card pairing a rank with a suit.

    Equality and ordering compare only the rank's numeric value;
    the suit never participates in comparisons.
    """

    def __init__(self, value, suit):
        self.__value = value
        self.__suit = suit

    def getValue(self):
        """Return this card's rank member."""
        return self.__value

    def getSuit(self):
        """Return this card's suit member."""
        return self.__suit

    def __str__(self):
        return f"{self.__value.name} of {self.__suit.value}"

    def __repr__(self):
        return str(self)

    def __eq__(self, card2):
        mine = self.__value.value
        theirs = card2.getValue().value
        return mine == theirs

    def __lt__(self, card2):
        mine = self.__value.value
        theirs = card2.getValue().value
        return mine < theirs
| 20.4375 | 61 | 0.522936 | 111 | 981 | 4.297297 | 0.405405 | 0.113208 | 0.08805 | 0.079665 | 0.197065 | 0.197065 | 0.197065 | 0.197065 | 0.197065 | 0 | 0 | 0.00627 | 0.349643 | 981 | 47 | 62 | 20.87234 | 0.73511 | 0 | 0 | 0 | 0 | 0 | 0.049251 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.194444 | false | 0 | 0.027778 | 0.166667 | 0.944444 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 2 |
48d47c747ba680c9121d62a07c21648e022f4229 | 101 | py | Python | Contest/ABC165/d/main.py | mpses/AtCoder | 9c101fcc0a1394754fcf2385af54b05c30a5ae2a | [
"CC0-1.0"
] | null | null | null | Contest/ABC165/d/main.py | mpses/AtCoder | 9c101fcc0a1394754fcf2385af54b05c30a5ae2a | [
"CC0-1.0"
] | null | null | null | Contest/ABC165/d/main.py | mpses/AtCoder | 9c101fcc0a1394754fcf2385af54b05c30a5ae2a | [
"CC0-1.0"
] | null | null | null | #!/usr/bin/env python3
a, b, n = map(int, input().split())
x = min(b-1, n)
print(a*x//b - a*(x // b)) | 25.25 | 35 | 0.524752 | 23 | 101 | 2.304348 | 0.652174 | 0.075472 | 0.113208 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.023529 | 0.158416 | 101 | 4 | 36 | 25.25 | 0.6 | 0.207921 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
48d81278fa035bd43c85b6d7eabe4a07038e7f4c | 2,510 | py | Python | tests/dhcpv4/relay/test_v4_decline_relay.py | isc-projects/forge | dfec8b41003d6b5a229f69ee93616e0e5cc6d71b | [
"0BSD"
] | 22 | 2015-02-27T11:51:05.000Z | 2022-02-28T12:39:29.000Z | tests/dhcpv4/relay/test_v4_decline_relay.py | isc-projects/forge | dfec8b41003d6b5a229f69ee93616e0e5cc6d71b | [
"0BSD"
] | 16 | 2018-10-30T15:00:12.000Z | 2019-01-11T17:55:13.000Z | tests/dhcpv4/relay/test_v4_decline_relay.py | isc-projects/forge | dfec8b41003d6b5a229f69ee93616e0e5cc6d71b | [
"0BSD"
] | 11 | 2015-02-27T11:51:36.000Z | 2021-03-30T08:33:54.000Z | """DHCPv4 address decline process"""
# pylint: disable=invalid-name,line-too-long
import pytest
import srv_msg
import misc
import srv_control
@pytest.mark.v4
@pytest.mark.relay
@pytest.mark.decline
def test_v4_relay_decline_success():
    """Relayed DHCPv4 DECLINE scenario: DORA, DECLINE, probation, re-offer.

    All traffic is shaped to look relayed (giaddr set, hops 1, source port 67).
    After the single pool address is declined, the test waits out the
    2-second probation period and verifies a different client is offered
    the same address again.
    """
    # check if DECLINE works when sent over relay
    misc.test_setup()
    # Single-address pool: every exchange must involve 192.168.50.1.
    srv_control.config_srv_subnet('192.168.50.0/24', '192.168.50.1-192.168.50.1')
    # Short probation period so the post-DECLINE wait stays fast.
    srv_control.set_conf_parameter_global('decline-probation-period', 2)
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    misc.test_procedure()
    # this is setting for every message in this test
    srv_msg.network_variable('source_port', 67)
    srv_msg.network_variable('source_address', '$(GIADDR4)')
    srv_msg.network_variable('destination_address', '$(SRV4_ADDR)')
    # Phase 1: relayed DISCOVER must be answered with an OFFER of the pool address.
    srv_msg.client_sets_value('Client', 'giaddr', '$(GIADDR4)')
    srv_msg.client_sets_value('Client', 'hops', 1)
    srv_msg.client_send_msg('DISCOVER')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'OFFER')
    srv_msg.response_check_content('yiaddr', '192.168.50.1')
    misc.test_procedure()
    # Phase 2: REQUEST the offered address; expect an ACK for it.
    srv_msg.client_sets_value('Client', 'giaddr', '$(GIADDR4)')
    srv_msg.client_sets_value('Client', 'hops', 1)
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '192.168.50.1')
    srv_msg.client_send_msg('REQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ACK')
    srv_msg.response_check_content('yiaddr', '192.168.50.1')
    misc.test_procedure()
    # Phase 3: DECLINE the acknowledged address (ciaddr zeroed, address in
    # requested_addr); the server is expected to send no reply.
    srv_msg.client_sets_value('Client', 'giaddr', '$(GIADDR4)')
    srv_msg.client_sets_value('Client', 'hops', 1)
    srv_msg.client_copy_option('server_id')
    srv_msg.client_sets_value('Client', 'ciaddr', '0.0.0.0')
    srv_msg.client_does_include_with_value('requested_addr', '192.168.50.1')
    srv_msg.client_send_msg('DECLINE')
    misc.pass_criteria()
    srv_msg.send_dont_wait_for_message()
    # wait probation period
    srv_msg.forge_sleep(2, 'seconds')
    misc.test_procedure()
    # Phase 4: a different client (new chaddr + client_id) must be offered
    # the previously declined address once probation has elapsed.
    srv_msg.client_sets_value('Client', 'giaddr', '$(GIADDR4)')
    srv_msg.client_sets_value('Client', 'hops', 1)
    srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:11')
    srv_msg.client_does_include_with_value('client_id', '00010203040111')
    srv_msg.client_send_msg('DISCOVER')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'OFFER')
    srv_msg.response_check_content('yiaddr', '192.168.50.1')
| 34.383562 | 81 | 0.724701 | 375 | 2,510 | 4.490667 | 0.272 | 0.110451 | 0.135392 | 0.095012 | 0.618765 | 0.579572 | 0.546318 | 0.527316 | 0.527316 | 0.527316 | 0 | 0.055302 | 0.128287 | 2,510 | 72 | 82 | 34.861111 | 0.714351 | 0.074502 | 0 | 0.529412 | 0 | 0 | 0.227312 | 0.021175 | 0 | 0 | 0 | 0 | 0 | 1 | 0.019608 | true | 0.078431 | 0.078431 | 0 | 0.098039 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
48dbb26859f0afaa281b9bcca0ba755f9d965d2b | 401 | py | Python | db_quick_setup/__init__.py | amezin/django-db-quick-setup | e0c90c8b112b2230b19885e39a92b67b5a7d3819 | [
"BSD-2-Clause"
] | 1 | 2016-05-27T14:25:37.000Z | 2016-05-27T14:25:37.000Z | db_quick_setup/__init__.py | amezin/django-db-quick-setup | e0c90c8b112b2230b19885e39a92b67b5a7d3819 | [
"BSD-2-Clause"
] | null | null | null | db_quick_setup/__init__.py | amezin/django-db-quick-setup | e0c90c8b112b2230b19885e39a92b67b5a7d3819 | [
"BSD-2-Clause"
] | null | null | null | from __future__ import absolute_import
from docker.utils import kwargs_from_env
from django.utils import six
from socket import getaddrinfo
def find_docker_host():
url = kwargs_from_env().get('base_url', None)
if url is None:
return '127.0.0.1'
host = six.moves.urllib.urlparse(url).hostname
if not host:
return '127.0.0.1'
return getaddrinfo(host, None)[4][0]
| 23.588235 | 50 | 0.703242 | 63 | 401 | 4.285714 | 0.492063 | 0.081481 | 0.096296 | 0.081481 | 0.088889 | 0 | 0 | 0 | 0 | 0 | 0 | 0.043478 | 0.197007 | 401 | 16 | 51 | 25.0625 | 0.795031 | 0 | 0 | 0.166667 | 0 | 0 | 0.064838 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0 | 0.333333 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
48e6966309a3d1deed2aa006f11928b40d2a4f4b | 586 | py | Python | jiraniapp/tests.py | Calebu6214/Neighborhood | 0a3f5ed9b426197bf7924fff897e5b7474b31f3a | [
"MIT"
] | null | null | null | jiraniapp/tests.py | Calebu6214/Neighborhood | 0a3f5ed9b426197bf7924fff897e5b7474b31f3a | [
"MIT"
] | null | null | null | jiraniapp/tests.py | Calebu6214/Neighborhood | 0a3f5ed9b426197bf7924fff897e5b7474b31f3a | [
"MIT"
] | null | null | null | from django.test import TestCase
from django.contrib.auth.models import User
from .models import *
import datetime as dt
# Create your tests here.
class neighbourhoodTestClass(TestCase):
def setUp(self):
self.kibra = neighbourhood(neighbourhood='kibra')
def test_instance(self):
self.assertTrue(isinstance(self.kibra,neighbourhood))
def tearDown(self):
neighbourhood.objects.all().delete()
def test_save_method(self):
self.kibra.save_neighbourhood()
jirani = neighbourhood.objects.all()
self.assertTrue(len(jirani)>0)
| 27.904762 | 61 | 0.713311 | 69 | 586 | 6 | 0.507246 | 0.057971 | 0.062802 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002096 | 0.186007 | 586 | 20 | 62 | 29.3 | 0.865828 | 0.039249 | 0 | 0 | 0 | 0 | 0.008913 | 0 | 0 | 0 | 0 | 0 | 0.133333 | 1 | 0.266667 | false | 0 | 0.266667 | 0 | 0.6 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
48e70295ac4f2ffdaa2239103e92fdfd53797e94 | 2,574 | py | Python | kiteconnect/exceptions.py | mohamedsaligh/pykiteconnect | f0d5653bc724de85b036b287f0100dd2ecdba784 | [
"MIT"
] | 1 | 2020-06-13T15:29:26.000Z | 2020-06-13T15:29:26.000Z | kiteconnect/exceptions.py | mohamedsaligh/pykiteconnect | f0d5653bc724de85b036b287f0100dd2ecdba784 | [
"MIT"
] | null | null | null | kiteconnect/exceptions.py | mohamedsaligh/pykiteconnect | f0d5653bc724de85b036b287f0100dd2ecdba784 | [
"MIT"
] | 1 | 2021-09-14T13:46:15.000Z | 2021-09-14T13:46:15.000Z | # -*- coding: utf-8 -*-
"""
exceptions.py
Exceptions raised by the Kite Connect client.
:copyright: (c) 2017 by Zerodha Technology.
:license: see LICENSE for details.
"""
class KiteException(Exception):
    """
    Root of the Kite client exception hierarchy.

    Every concrete Kite error derives from this class; the constructor
    stores the message on the Exception and an HTTP-style status on
    the ``code`` instance attribute.
    """

    def __init__(self, message, code=500):
        """Store *message* via Exception and *code* on the instance."""
        super(KiteException, self).__init__(message)
        self.code = code


class GeneralException(KiteException):
    """An unclassified, general error. Default code is 500."""

    def __init__(self, message, code=500):
        """Create a general error, HTTP 500 by default."""
        KiteException.__init__(self, message, code)


class TokenException(KiteException):
    """Represents all token and authentication related errors. Default code is 403."""

    def __init__(self, message, code=403):
        """Create a token/auth error, HTTP 403 by default."""
        KiteException.__init__(self, message, code)


class PermissionException(KiteException):
    """Represents permission denied exceptions for certain calls. Default code is 403."""

    def __init__(self, message, code=403):
        """Create a permission error, HTTP 403 by default."""
        KiteException.__init__(self, message, code)


class OrderException(KiteException):
    """Represents all order placement and manipulation errors. Default code is 500."""

    def __init__(self, message, code=500):
        """Create an order error, HTTP 500 by default."""
        KiteException.__init__(self, message, code)


class InputException(KiteException):
    """Represents user input errors such as missing and invalid parameters. Default code is 400."""

    def __init__(self, message, code=400):
        """Create an input-validation error, HTTP 400 by default."""
        KiteException.__init__(self, message, code)


class DataException(KiteException):
    """Represents a bad response from the backend Order Management System (OMS). Default code is 502."""

    def __init__(self, message, code=502):
        """Create a bad-backend-response error, HTTP 502 by default."""
        KiteException.__init__(self, message, code)


class NetworkException(KiteException):
    """Represents a network issue between Kite and the backend Order Management System (OMS). Default code is 503."""

    def __init__(self, message, code=503):
        """Create a network error, HTTP 503 by default."""
        KiteException.__init__(self, message, code)
| 31.777778 | 117 | 0.690365 | 287 | 2,574 | 5.968641 | 0.324042 | 0.096322 | 0.051372 | 0.084063 | 0.366608 | 0.244016 | 0.244016 | 0.244016 | 0.244016 | 0.189142 | 0 | 0.024201 | 0.197358 | 2,574 | 80 | 118 | 32.175 | 0.804937 | 0.451049 | 0 | 0.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.32 | false | 0 | 0 | 0 | 0.64 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
48e8f51357886ded7ca789487247660fc205383f | 551 | py | Python | applications/dpto/serializers.py | PEM-Humboldt/visor-geografico-I2d-backend | 5b5f4e0eee07e14bd8124cec624b5a5004c4d168 | [
"MIT"
] | null | null | null | applications/dpto/serializers.py | PEM-Humboldt/visor-geografico-I2d-backend | 5b5f4e0eee07e14bd8124cec624b5a5004c4d168 | [
"MIT"
] | null | null | null | applications/dpto/serializers.py | PEM-Humboldt/visor-geografico-I2d-backend | 5b5f4e0eee07e14bd8124cec624b5a5004c4d168 | [
"MIT"
] | null | null | null |
from rest_framework import serializers
from .models import DptoQueries,DptoAmenazas
class dptoQueriesSerializer(serializers.ModelSerializer):
    """DRF serializer for DptoQueries, exposing the listed count fields."""
    class Meta:
        model= DptoQueries
        fields =(
            'tipo',
            'registers',
            'species',
            'exoticas',
            'endemicas'
        )
class dptoDangerSerializer(serializers.ModelSerializer):
    """DRF serializer for DptoAmenazas, exposing the listed threat fields."""
    class Meta:
        model= DptoAmenazas
        fields =(
            'codigo',
            'tipo',
            'amenazadas',
            'nombre'
        )
| 21.192308 | 57 | 0.549909 | 37 | 551 | 8.162162 | 0.621622 | 0.172185 | 0.205298 | 0.231788 | 0.264901 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.368421 | 551 | 25 | 58 | 22.04 | 0.867816 | 0 | 0 | 0.285714 | 0 | 0 | 0.114545 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.095238 | 0 | 0.285714 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
48fb33411f2b9fa1aba64918545cc26ac5d94343 | 7,538 | py | Python | 3 - priority_purge.py | tsitokely/ETL-Analysis | 45f88c4f8f453fc0c8a9612778a9c0110b067ddb | [
"MIT"
] | null | null | null | 3 - priority_purge.py | tsitokely/ETL-Analysis | 45f88c4f8f453fc0c8a9612778a9c0110b067ddb | [
"MIT"
] | null | null | null | 3 - priority_purge.py | tsitokely/ETL-Analysis | 45f88c4f8f453fc0c8a9612778a9c0110b067ddb | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Daily purge-candidate analysis against the local SQLite tracking database.
# NOTE(review): this script is Python 2 only — `.decode("utf-8")` below assumes
# byte strings from sqlite3, and the '\N' sequences inside the SQL literals are
# a SyntaxError under Python 3 (they would need raw strings there).
import sqlite3 as lite
import sys
import datetime
import csv
now = datetime.datetime.now()
currdate = now.strftime("%Y%m%d")  # used to pick today's purgeYYYYMMDD table
orig_stdout = sys.stdout  # presumably restored later after redirecting output; truncated here
dbcon = lite.connect(r"WORK_PATH\\Purge\\suivi_purge.db")
#Analysis of current day turnover
cur = dbcon.cursor()
# First table name in the schema (presumably today's purge table — not verified).
cur.execute("SELECT name FROM sqlite_master WHERE type='table';")
tables = cur.fetchall()
nomtable = tables[0][0]
nomtable = nomtable.decode("utf-8")
# Total number of rows in today's purge table.
cur.execute("select count(*) from purge%s" % currdate)
rownumber = cur.fetchone()
rownum = rownumber[0]
# MSISDNs appearing more than once (duplicates) in today's table.
cur.execute("select msisdn_9 from purge%s group by msisdn_9 having count(*)>1;" % currdate)
duplicate = cur.fetchall()
# Rows already flagged under any of the six priority flags.
cur.execute("select count(*) from purge%s where Flag_priorite1 = 1 or Flag_priorite2 = 1 or Flag_priorite3 = 1 or Flag_priorite4 = 1 or Flag_priorite5 = 1 or Flag_priorite6 = 1;" % currdate)
flagnumber = cur.fetchone()
# Priority 1 ("Priorité 1"): recount candidates matching the priority-1
# criteria directly, then compare against the rows flagged Flag_priorite1.
cur.execute("""select count(*) from purge%s
where
Statut_FT = 'INACTIF'
and
KYC = '' -- KYC
and
Statut_IN = 'Desactive'
and
stk_flag = 0
and
date(OM_date_registration) is null
and
main_account = '0.00'
and
Channel_user_category = '\N'
and
(Channel_balance = '\N' or Channel_balance = 0);""" % currdate)
P1Q = cur.fetchone()
cur.execute("select count(*) from purge%s where Flag_priorite1 = 1;" % currdate)
P1F = cur.fetchone()
# Discrepancy between the recomputed criteria count and the stored flag count.
P1diff = P1Q[0] - P1F[0]
# Priority 2 ("Priorité 2"): same check for the priority-2 criteria
# (IN status 'Inactif', >365 days inactive, no recharge within a year).
cur.execute("""select count(*) from purge%s
where
Statut_FT = 'INACTIF'
and
KYC = ''
and
Statut_IN = 'Inactif'
and
stk_flag = 0
and
date(OM_date_registration) is null
and
main_account = '0.00'
and
Channel_user_category = '\N'
and
(Channel_balance = '\N' or Channel_balance = 0)
and
cast(duree_inactivite as int) >365
and
(date(last_recharge) < date('now','-1 year','-1 day')
or date(last_recharge) is null)""" % currdate)
P2Q = cur.fetchone()
cur.execute("select count(*) from purge%s where Flag_priorite2 = 1;" % currdate)
P2F = cur.fetchone()
P2diff = P2Q[0] - P2F[0]
# Priorité 3
cur.execute("""select count(*) from purge%s
where
Statut_FT = 'INACTIF'
and
KYC = ''
and
Statut_IN = 'Inactif'
and
stk_flag = 0
and
OM_date_registration = '\N'
and
(round(main_account,2) <= 50.00
and round(main_account,2) > 0.00)
and
Channel_user_category = '\N'
and
(Channel_balance = '\N' or Channel_balance = 0)
and
cast(duree_inactivite as int) >365
and
(date(last_recharge) < date('now','-1 year','-2 day')
or date(last_recharge) is null)""" % currdate)
P3Q = cur.fetchone()
cur.execute("select count(*) from purge%s where Flag_priorite3 = 1" % currdate)
P3F = cur.fetchone()
P3diff = P3Q[0] - P3F[0]
# Priorité 4
cur.execute("""select count(*) from purge%s
where
Statut_FT = 'INACTIF'
and
KYC = ''
and
Statut_IN = 'Inactif'
and
stk_flag = 0
and
date(OM_date_registration) is null
and
main_account = '0.00'
and
Channel_user_category = '\N'
and
(Channel_balance = '\N' or Channel_balance = 0)
and
cast(duree_inactivite as int) <= 365
and
cast(duree_inactivite as int) > 180
and
(date(last_recharge) < date('now','-1 year','-2 day')
or date(last_recharge) is null)""" % currdate)
P4Q = cur.fetchone()
cur.execute("select count(*) from purge%s where Flag_priorite4 = 1" % currdate)
P4F = cur.fetchone()
P4diff = P4Q[0] - P4F[0]
# Priorité 5
cur.execute("""select count(*) from purge%s
where
Statut_FT = 'INACTIF'
and
KYC = ''
and
Statut_IN = 'Inactif'
and
stk_flag = 0
and
OM_date_registration = '\N'
and
(round(main_account,2) <= 50.00
and round(main_account,2) > 0.00)
and
Channel_user_category = '\N'
and
(Channel_balance = '\N' or Channel_balance = 0)
and
cast(duree_inactivite as int) <= 365
and
cast(duree_inactivite as int) > 180
and
(date(last_recharge) < date('now','-1 year','-2 day')
or date(last_recharge) is null)""" % currdate)
P5Q = cur.fetchone()
cur.execute("select count(*) from purge%s where Flag_priorite5 = 1" % currdate)
P5F = cur.fetchone()
P5diff = P5Q[0] - P5F[0]
# Priorité 6
cur.execute("""select count(*) from purge%s
where
Statut_FT = 'INACTIF'
and
KYC = ''
and
Statut_IN = 'Inactif'
and
stk_flag = 0
and
OM_date_registration = '\N'
and
(round(main_account,2) <= 2000.00
and round(main_account,2) > 50.00)
and
Channel_user_category = '\N'
and
(Channel_balance = '\N' or Channel_balance = 0)
and
cast(duree_inactivite as int) > 365
and
(date(last_recharge) < date('now','-1 year','-2 day')
or date(last_recharge) is null)""" % currdate)
P6Q = cur.fetchone()
cur.execute("select count(*) from purge%s where Flag_priorite6 = 1" % currdate)
P6F = cur.fetchone()
P6diff = P6Q[0] - P6F[0]
# Priorité 7
cur.execute("""select count(*) from purge%s
where
plan_tarifaire in('5001','5006','5007')
and
Statut_FT = 'INACTIF'
and
KYC = ''
and
Statut_IN = 'Inactif'
and
stk_flag = 0
and
date(OM_date_registration) is null
and
Channel_user_category = '\N'
and
(Channel_balance = '\N' or Channel_balance = 0)""" % currdate)
P7Q = cur.fetchone()
cur.execute("select count(*) from purge%s where Flag_priorite7 = 1" % currdate)
P7F = cur.fetchone()
P7diff = P7Q[0] - P7F[0]
# Priorité 8
cur.execute("""select count(*) from purge%s
where
Statut_FT = 'INACTIF'
and
KYC = ''
and
Statut_IN = 'Inactif'
and
stk_flag = 0
and
date(OM_date_registration) is null
and
Channel_user_category = '\N'
and
(Channel_balance = '\N' or Channel_balance = 0)
and
cast(duree_inactivite as int) >= 365""" % currdate)
P8Q = cur.fetchone()
cur.execute("select count(*) from purge%s where Flag_priorite8 = 1" % currdate)
P8F = cur.fetchone()
P8diff = P8Q[0] - P8F[0]
dbcon.close()
filename_log = "log_priority%s.txt" % currdate
f = file(r"WORK_PATH\\Purge\\Result\\%s" % filename_log, 'w')
sys.stdout = f
print "------------------ Analysis of table %s ------------------" % nomtable
print "- The table %s has %s rows" % (nomtable,rownum)
print "- There are %s duplicate in table %s: " % (len(duplicate),nomtable)
for i in duplicate:
print i[0]
print "- %s MSISDN have a flag in the table %s" % (flagnumber[0],nomtable)
print ""
for x in range(1,9):
PXQ = "P" + str(x) + "Q"
PXF = "P" + str(x) + "F"
PXdiff = "P" + str(x) + "diff"
print "------------------ Priority %i ------------------" % x
if eval(PXdiff) == 0 :
print "Verification of Priority %i OK" % x
print "Number of MSISDN with a flag priority %i: %i" % (x,eval(PXF)[0])
else:
print "The difference is: %i for priority %i " % (eval(PXdiff),x)
print "Results of query %i" % eval(PXQ)[0]
print "Results of flag %i" % eval(PXF)[0]
print ""
sys.stdout = orig_stdout
f.close()
filename_csv = "result_purge_Query%s.csv" % currdate
f = open(r"WORK_PATH\\Purge\\Result\\%s" % filename_csv, 'wb')
try:
writer = csv.writer(f,delimiter=";")
writer.writerow( (u'Priority', 'Nombre MSISDN'))
for i in range(1,9):
PXQ = "P" + str(i) + "Q"
priority = "Priority %s" % i
writer.writerow((priority,eval(PXQ)[0]))
finally:
f.close()
filename_csv = "result_purge_Flag%s.csv" % currdate
f = open(r"WORK_PATH\\Purge\\Result\\%s" % filename_csv, 'wb')
try:
writer = csv.writer(f,delimiter=";")
writer.writerow((u'Priority', 'Number of MSISDN'))
for i in range(1,9):
PXF = "P" + str(i) + "F"
priority = "Priority %s" % i
writer.writerow( (priority,eval(PXF)[0]))
finally:
f.close() | 25.043189 | 191 | 0.645662 | 1,132 | 7,538 | 4.180212 | 0.165194 | 0.042265 | 0.067625 | 0.079882 | 0.649831 | 0.649831 | 0.637574 | 0.618555 | 0.573331 | 0.573331 | 0 | 0.035614 | 0.19541 | 7,538 | 301 | 192 | 25.043189 | 0.7446 | 0.019634 | 0 | 0.656716 | 0 | 0.003731 | 0.617036 | 0.110468 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.014925 | null | null | 0.048507 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
5b0182a311cd9dfac53899e05b69e434377995e0 | 230 | py | Python | pymatting/__about__.py | Hello-World-Test12/pymatting | 424ebd4b8b4709793651f373c7543e137b1496a9 | [
"MIT"
] | null | null | null | pymatting/__about__.py | Hello-World-Test12/pymatting | 424ebd4b8b4709793651f373c7543e137b1496a9 | [
"MIT"
] | null | null | null | pymatting/__about__.py | Hello-World-Test12/pymatting | 424ebd4b8b4709793651f373c7543e137b1496a9 | [
"MIT"
] | null | null | null | __title__ = "PyMatting"
# Distribution metadata constants for the PyMatting package.
__version__ = "1.1.3"
__author__ = "The PyMatting Developers"
__email__ = "pymatting@gmail.com"
__license__ = "MIT"
__uri__ = "https://pymatting.github.io"
__summary__ = "Python package for alpha matting."
| 28.75 | 49 | 0.747826 | 27 | 230 | 5.333333 | 0.851852 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014778 | 0.117391 | 230 | 7 | 50 | 32.857143 | 0.694581 | 0 | 0 | 0 | 0 | 0 | 0.521739 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
5b0cce047c52f68b59d60028cfcf10657ee9b8fa | 9,450 | py | Python | lib/luxi.py | modulus-sa/ganeti | 592c0e945cc2c7b0013f813ea8c9d8ec0d5bab98 | [
"BSD-2-Clause"
] | 396 | 2015-01-22T11:44:32.000Z | 2022-03-31T14:14:29.000Z | lib/luxi.py | modulus-sa/ganeti | 592c0e945cc2c7b0013f813ea8c9d8ec0d5bab98 | [
"BSD-2-Clause"
] | 1,550 | 2015-04-05T09:53:50.000Z | 2022-03-28T17:42:20.000Z | lib/luxi.py | modulus-sa/ganeti | 592c0e945cc2c7b0013f813ea8c9d8ec0d5bab98 | [
"BSD-2-Clause"
] | 119 | 2015-01-06T21:37:15.000Z | 2022-03-07T06:36:26.000Z | #
#
# Copyright (C) 2006, 2007, 2011, 2012, 2013, 2014 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Module for the LUXI protocol
This module implements the local unix socket protocol. You only need
this module and the opcodes module in the client program in order to
communicate with the master.
The module is also used by the master daemon.
"""
from ganeti import constants
from ganeti import pathutils
from ganeti import objects
import ganeti.rpc.client as cl
from ganeti.rpc.errors import RequestError
from ganeti.rpc.transport import Transport
__all__ = [
  # classes:
  "Client"
  ]
# LUXI request method names, re-exported from constants so callers of this
# module do not need to reach into constants.LUXI_REQ_* directly.
REQ_SUBMIT_JOB = constants.LUXI_REQ_SUBMIT_JOB
REQ_SUBMIT_JOB_TO_DRAINED_QUEUE = constants.LUXI_REQ_SUBMIT_JOB_TO_DRAINED_QUEUE
REQ_SUBMIT_MANY_JOBS = constants.LUXI_REQ_SUBMIT_MANY_JOBS
REQ_PICKUP_JOB = constants.LUXI_REQ_PICKUP_JOB
REQ_WAIT_FOR_JOB_CHANGE = constants.LUXI_REQ_WAIT_FOR_JOB_CHANGE
REQ_CANCEL_JOB = constants.LUXI_REQ_CANCEL_JOB
REQ_ARCHIVE_JOB = constants.LUXI_REQ_ARCHIVE_JOB
REQ_CHANGE_JOB_PRIORITY = constants.LUXI_REQ_CHANGE_JOB_PRIORITY
REQ_AUTO_ARCHIVE_JOBS = constants.LUXI_REQ_AUTO_ARCHIVE_JOBS
REQ_QUERY = constants.LUXI_REQ_QUERY
REQ_QUERY_FIELDS = constants.LUXI_REQ_QUERY_FIELDS
REQ_QUERY_JOBS = constants.LUXI_REQ_QUERY_JOBS
REQ_QUERY_FILTERS = constants.LUXI_REQ_QUERY_FILTERS
REQ_REPLACE_FILTER = constants.LUXI_REQ_REPLACE_FILTER
REQ_DELETE_FILTER = constants.LUXI_REQ_DELETE_FILTER
REQ_QUERY_INSTANCES = constants.LUXI_REQ_QUERY_INSTANCES
REQ_QUERY_NODES = constants.LUXI_REQ_QUERY_NODES
REQ_QUERY_GROUPS = constants.LUXI_REQ_QUERY_GROUPS
REQ_QUERY_NETWORKS = constants.LUXI_REQ_QUERY_NETWORKS
REQ_QUERY_EXPORTS = constants.LUXI_REQ_QUERY_EXPORTS
REQ_QUERY_CONFIG_VALUES = constants.LUXI_REQ_QUERY_CONFIG_VALUES
REQ_QUERY_CLUSTER_INFO = constants.LUXI_REQ_QUERY_CLUSTER_INFO
REQ_QUERY_TAGS = constants.LUXI_REQ_QUERY_TAGS
REQ_SET_DRAIN_FLAG = constants.LUXI_REQ_SET_DRAIN_FLAG
REQ_SET_WATCHER_PAUSE = constants.LUXI_REQ_SET_WATCHER_PAUSE
REQ_ALL = constants.LUXI_REQ_ALL
# Timeout defaults re-exported from constants (DEF_RWTO is also used by
# AutoArchiveJobs below to derive its call timeout).
DEF_RWTO = constants.LUXI_DEF_RWTO
WFJC_TIMEOUT = constants.LUXI_WFJC_TIMEOUT
class Client(cl.AbstractClient):
  """High-level client implementation.
  This uses a backing Transport-like class on top of which it
  implements data serialization/deserialization.
  """
  def __init__(self, address=None, timeouts=None, transport=Transport):
    """Constructor for the Client class.
    Arguments are the same as for L{AbstractClient}.
    """
    super(Client, self).__init__(timeouts, transport)
    # Override the version of the protocol:
    self.version = constants.LUXI_VERSION
    # Store the socket address
    if address is None:
      address = pathutils.QUERY_SOCKET
    self.address = address
    self._InitTransport()
  def _GetAddress(self):
    """Return the socket address the transport connects to."""
    return self.address
  def SetQueueDrainFlag(self, drain_flag):
    """Set or clear the job queue drain flag."""
    return self.CallMethod(REQ_SET_DRAIN_FLAG, (drain_flag, ))
  def SetWatcherPause(self, until):
    """Pause the watcher until the given time."""
    return self.CallMethod(REQ_SET_WATCHER_PAUSE, (until, ))
  def PickupJob(self, job):
    """Ask the master to pick up the given job."""
    return self.CallMethod(REQ_PICKUP_JOB, (job,))
  def SubmitJob(self, ops):
    """Submit one job built from the given opcodes.
    ConfigObject opcodes are serialized with private fields included;
    everything else is serialized via __getstate__.
    """
    ops_state = [op.__getstate__()
                 if not isinstance(op, objects.ConfigObject)
                 else op.ToDict(_with_private=True)
                 for op in ops]
    return self.CallMethod(REQ_SUBMIT_JOB, (ops_state, ))
  def SubmitJobToDrainedQueue(self, ops):
    """Submit a job even when the queue is drained."""
    ops_state = [op.__getstate__() for op in ops]
    return self.CallMethod(REQ_SUBMIT_JOB_TO_DRAINED_QUEUE, (ops_state, ))
  def SubmitManyJobs(self, jobs):
    """Submit several jobs (each a list of opcodes) at once."""
    jobs_state = []
    for ops in jobs:
      jobs_state.append([op.__getstate__() for op in ops])
    return self.CallMethod(REQ_SUBMIT_MANY_JOBS, (jobs_state, ))
  @staticmethod
  def _PrepareJobId(request_name, job_id):
    """Coerce a job id to int, raising RequestError on bad input."""
    try:
      return int(job_id)
    except ValueError:
      raise RequestError("Invalid parameter passed to %s as job id: "
                         " expected integer, got value %s" %
                         (request_name, job_id))
  def CancelJob(self, job_id, kill=False):
    """Cancel (or, with kill=True, forcefully terminate) a job."""
    job_id = Client._PrepareJobId(REQ_CANCEL_JOB, job_id)
    return self.CallMethod(REQ_CANCEL_JOB, (job_id, kill))
  def ArchiveJob(self, job_id):
    """Archive a finished job."""
    job_id = Client._PrepareJobId(REQ_ARCHIVE_JOB, job_id)
    return self.CallMethod(REQ_ARCHIVE_JOB, (job_id, ))
  def ChangeJobPriority(self, job_id, priority):
    """Change the priority of a queued job."""
    job_id = Client._PrepareJobId(REQ_CHANGE_JOB_PRIORITY, job_id)
    return self.CallMethod(REQ_CHANGE_JOB_PRIORITY, (job_id, priority))
  def AutoArchiveJobs(self, age):
    """Archive jobs older than the given age.
    The call timeout is derived from the default read/write timeout.
    """
    timeout = (DEF_RWTO - 1) // 2
    return self.CallMethod(REQ_AUTO_ARCHIVE_JOBS, (age, timeout))
  def WaitForJobChangeOnce(self, job_id, fields,
                           prev_job_info, prev_log_serial,
                           timeout=WFJC_TIMEOUT):
    """Waits for changes on a job.
    @param job_id: Job ID
    @type fields: list
    @param fields: List of field names to be observed
    @type prev_job_info: None or list
    @param prev_job_info: Previously received job information
    @type prev_log_serial: None or int/long
    @param prev_log_serial: Highest log serial number previously received
    @type timeout: int/float
    @param timeout: Timeout in seconds (values larger than L{WFJC_TIMEOUT} will
      be capped to that value)
    """
    assert timeout >= 0, "Timeout can not be negative"
    return self.CallMethod(REQ_WAIT_FOR_JOB_CHANGE,
                           (job_id, fields, prev_job_info,
                            prev_log_serial,
                            min(WFJC_TIMEOUT, timeout)))
  def WaitForJobChange(self, job_id, fields, prev_job_info, prev_log_serial):
    """Block (polling WaitForJobChangeOnce) until the job actually changes."""
    job_id = Client._PrepareJobId(REQ_WAIT_FOR_JOB_CHANGE, job_id)
    while True:
      result = self.WaitForJobChangeOnce(job_id, fields,
                                         prev_job_info, prev_log_serial)
      if result != constants.JOB_NOTCHANGED:
        break
    return result
  def Query(self, what, fields, qfilter):
    """Query for resources/items.
    @param what: One of L{constants.QR_VIA_LUXI}
    @type fields: List of strings
    @param fields: List of requested fields
    @type qfilter: None or list
    @param qfilter: Query filter
    @rtype: L{objects.QueryResponse}
    """
    result = self.CallMethod(REQ_QUERY, (what, fields, qfilter))
    return objects.QueryResponse.FromDict(result)
  def QueryFields(self, what, fields):
    """Query for available fields.
    @param what: One of L{constants.QR_VIA_LUXI}
    @type fields: None or list of strings
    @param fields: List of requested fields
    @rtype: L{objects.QueryFieldsResponse}
    """
    result = self.CallMethod(REQ_QUERY_FIELDS, (what, fields))
    return objects.QueryFieldsResponse.FromDict(result)
  def QueryJobs(self, job_ids, fields):
    """Query the given fields of the given jobs."""
    return self.CallMethod(REQ_QUERY_JOBS, (job_ids, fields))
  def QueryFilters(self, uuids, fields):
    """Query the given fields of the given job filters."""
    return self.CallMethod(REQ_QUERY_FILTERS, (uuids, fields))
  def ReplaceFilter(self, uuid, priority, predicates, action, reason):
    """Replace (or create) a job filter rule."""
    return self.CallMethod(REQ_REPLACE_FILTER,
                           (uuid, priority, predicates, action, reason))
  def DeleteFilter(self, uuid):
    """Delete the job filter rule with the given uuid."""
    return self.CallMethod(REQ_DELETE_FILTER, (uuid, ))
  def QueryInstances(self, names, fields, use_locking):
    """Query the given fields of the named instances."""
    return self.CallMethod(REQ_QUERY_INSTANCES, (names, fields, use_locking))
  def QueryNodes(self, names, fields, use_locking):
    """Query the given fields of the named nodes."""
    return self.CallMethod(REQ_QUERY_NODES, (names, fields, use_locking))
  def QueryGroups(self, names, fields, use_locking):
    """Query the given fields of the named node groups."""
    return self.CallMethod(REQ_QUERY_GROUPS, (names, fields, use_locking))
  def QueryNetworks(self, names, fields, use_locking):
    """Query the given fields of the named networks."""
    return self.CallMethod(REQ_QUERY_NETWORKS, (names, fields, use_locking))
  def QueryExports(self, nodes, use_locking):
    """Query exports on the given nodes."""
    return self.CallMethod(REQ_QUERY_EXPORTS, (nodes, use_locking))
  def QueryClusterInfo(self):
    """Query general cluster information."""
    return self.CallMethod(REQ_QUERY_CLUSTER_INFO, ())
  def QueryConfigValues(self, fields):
    """Query the given configuration values."""
    return self.CallMethod(REQ_QUERY_CONFIG_VALUES, (fields, ))
  def QueryTags(self, kind, name):
    """Query the tags of the object of the given kind and name."""
    return self.CallMethod(REQ_QUERY_TAGS, (kind, name))
| 37.204724 | 80 | 0.74381 | 1,290 | 9,450 | 5.184496 | 0.251938 | 0.043062 | 0.062201 | 0.079097 | 0.273325 | 0.184809 | 0.143242 | 0.12186 | 0.12186 | 0.097937 | 0 | 0.003744 | 0.180423 | 9,450 | 253 | 81 | 37.351779 | 0.859781 | 0.290476 | 0 | 0 | 0 | 0 | 0.01624 | 0 | 0 | 0 | 0 | 0 | 0.007463 | 1 | 0.216418 | false | 0.007463 | 0.044776 | 0.119403 | 0.477612 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
5b13841ffdd490d53bb5913f2c10e95fe08758b7 | 2,505 | py | Python | hubcheck/pageobjects/widgets/resources_new.py | codedsk/hubcheck | 2ff506eb56ba00f035300862f8848e4168452a17 | [
"MIT"
] | 1 | 2016-02-13T13:42:23.000Z | 2016-02-13T13:42:23.000Z | hubcheck/pageobjects/widgets/resources_new.py | codedsk/hubcheck | 2ff506eb56ba00f035300862f8848e4168452a17 | [
"MIT"
] | null | null | null | hubcheck/pageobjects/widgets/resources_new.py | codedsk/hubcheck | 2ff506eb56ba00f035300862f8848e4168452a17 | [
"MIT"
] | null | null | null | from hubcheck.pageobjects.basepagewidget import BasePageWidget
from hubcheck.pageobjects.basepageelement import Link
class ResourcesNew(BasePageWidget):
    """Page object for the "new resource" page: a create link, legal links,
    a trouble-ticket link and a category browser component."""

    def __init__(self, owner, locatordict=None):
        # Use None + per-call {} instead of the original mutable default
        # argument (`locatordict={}`), which is shared across every
        # construction and can leak locator overrides between instances.
        if locatordict is None:
            locatordict = {}
        super(ResourcesNew,self).__init__(owner,locatordict)

        # load hub's classes
        ResourcesNew_Locators = self.load_class('ResourcesNew_Locators')
        ResourcesCategoryBrowser = self.load_class('ResourcesCategoryBrowser')

        # update this object's locator
        self.locators.update(ResourcesNew_Locators.locators)

        # update the locators with those from the owner
        self.update_locators_from_owner()

        # setup page object's components
        self.create = Link(self,{'base':'create'})
        self.legal_cc = Link(self,{'base':'legal_cc'})
        self.legal_license = Link(self,{'base':'legal_license'})
        self.file_ticket = Link(self,{'base':'file_ticket'})
        self.catbrowser = ResourcesCategoryBrowser(self,{'base':'catbrowser'})

        # update the component's locators with this objects overrides
        self._updateLocators()

    def goto_create(self):
        """click the resource create link"""
        return self.create.click()

    def goto_legal_cc(self):
        """click on the legal creative commons link"""
        return self.legal_cc.click()

    def goto_legal_license(self):
        """click on the legal license link"""
        return self.legal_license.click()

    def goto_file_ticket(self):
        """click on the file ticket link"""
        return self.file_ticket.click()

    def goto_category_by_title(self,category):
        """click on a category title"""
        return self.catbrowser.goto_category_by_title(category)

    def get_category_titles(self):
        """return a list of category titles"""
        return self.catbrowser.get_category_titles()

    def get_category_classes(self):
        """return a list of category classes"""
        return self.catbrowser.get_category_classes()
class ResourcesNew_Locators_Base(object):
    """Default selenium locators (css= / xpath= strings) for ResourcesNew."""

    locators = {
        'base'          : "css=#content",
        'create'        : "css=#getstarted a",
        'legal_cc'      : "xpath=//a[contains(text(),'Creative Commons 3')]",
        'legal_license' : "xpath=//a[contains(text(),'more details')]",
        'file_ticket'   : "xpath=//a[contains(text(),'file a trouble report')]",
        'catbrowser'    : "css=#content",
    }
| 30.54878 | 82 | 0.648303 | 281 | 2,505 | 5.594306 | 0.24911 | 0.044529 | 0.030534 | 0.026718 | 0.09542 | 0.031807 | 0 | 0 | 0 | 0 | 0 | 0.000521 | 0.233533 | 2,505 | 81 | 83 | 30.925926 | 0.818229 | 0.177645 | 0 | 0 | 0 | 0 | 0.172208 | 0.070471 | 0 | 0 | 0 | 0 | 0 | 1 | 0.210526 | false | 0 | 0.052632 | 0 | 0.526316 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
5b241b4df4ec0a789b0c0997af52b8f4efdad04b | 2,033 | py | Python | starfish/image/_filter/__init__.py | vipulsinghal02/starfish | c3d347954ad40a7a4be9a50d89974f5fbbc2919d | [
"MIT"
] | null | null | null | starfish/image/_filter/__init__.py | vipulsinghal02/starfish | c3d347954ad40a7a4be9a50d89974f5fbbc2919d | [
"MIT"
] | null | null | null | starfish/image/_filter/__init__.py | vipulsinghal02/starfish | c3d347954ad40a7a4be9a50d89974f5fbbc2919d | [
"MIT"
] | null | null | null | import argparse
from typing import Type
from starfish.imagestack.imagestack import ImageStack
from starfish.pipeline import AlgorithmBase, PipelineComponent
from starfish.util.argparse import FsExistsType
from . import _base
from . import bandpass
from . import clip
from . import gaussian_high_pass
from . import gaussian_low_pass
from . import mean_high_pass
from . import richardson_lucy_deconvolution
from . import scale_by_percentile
from . import white_tophat
from . import zero_by_channel_magnitude
class Filter(PipelineComponent):
    """Pipeline component exposing the registered filter algorithms on the CLI."""
    # Parser for the "filter" sub-command; assigned in _add_to_parser.
    filter_group: argparse.ArgumentParser
    @classmethod
    def _get_algorithm_base_class(cls) -> Type[AlgorithmBase]:
        # All filter algorithms discovered for this component derive from this.
        return _base.FilterAlgorithmBase
    @classmethod
    def _add_to_parser(cls, subparsers):
        """Adds the filter component to the CLI argument parser."""
        filter_group = subparsers.add_parser("filter")
        filter_group.add_argument("-i", "--input", type=FsExistsType(), required=True)
        filter_group.add_argument("-o", "--output", required=True)
        filter_group.set_defaults(starfish_command=Filter._cli)
        filter_subparsers = filter_group.add_subparsers(dest="filter_algorithm_class")
        # One nested sub-parser per registered algorithm; each algorithm
        # contributes its own arguments and is stored as the default for
        # args.filter_algorithm_class.
        for algorithm_cls in cls._algorithm_to_class_map().values():
            group_parser = filter_subparsers.add_parser(algorithm_cls._get_algorithm_name())
            group_parser.set_defaults(filter_algorithm_class=algorithm_cls)
            algorithm_cls._add_arguments(group_parser)
        cls.filter_group = filter_group
    @classmethod
    def _cli(cls, args, print_help=False):
        """Runs the filter component based on parsed arguments."""
        # No algorithm chosen (or explicit help request): show usage, exit(2).
        if args.filter_algorithm_class is None or print_help:
            cls.filter_group.print_help()
            cls.filter_group.exit(status=2)
        print('Filtering images ...')
        stack = ImageStack.from_path_or_url(args.input)
        # The selected algorithm class is constructed from all parsed args.
        instance = args.filter_algorithm_class(**vars(args))
        output = instance.run(stack)
        output.write(args.output)
| 36.303571 | 92 | 0.73635 | 249 | 2,033 | 5.710843 | 0.365462 | 0.070323 | 0.056259 | 0.025316 | 0.032349 | 0 | 0 | 0 | 0 | 0 | 0 | 0.000601 | 0.181997 | 2,033 | 55 | 93 | 36.963636 | 0.85448 | 0.05214 | 0 | 0.071429 | 0 | 0 | 0.034969 | 0.011482 | 0 | 0 | 0 | 0 | 0 | 1 | 0.071429 | false | 0.095238 | 0.357143 | 0.02381 | 0.5 | 0.095238 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 2 |
d28e0c06c113ef2d6bddd8bbe67d8e0c5ef91beb | 2,930 | py | Python | inference/data/custom_voc.py | TAViT2022/TAViT | 6ea42150c57af0e0618675565440df85121cf50a | [
"Apache-2.0"
] | null | null | null | inference/data/custom_voc.py | TAViT2022/TAViT | 6ea42150c57af0e0618675565440df85121cf50a | [
"Apache-2.0"
] | null | null | null | inference/data/custom_voc.py | TAViT2022/TAViT | 6ea42150c57af0e0618675565440df85121cf50a | [
"Apache-2.0"
] | null | null | null | import os
import tarfile
import collections
from torchvision.datasets.vision import VisionDataset
import xml.etree.ElementTree as ET
from PIL import Image
from typing import Any, Callable, Dict, Optional, Tuple, List
from torchvision.datasets.utils import download_and_extract_archive, verify_str_arg
import warnings
import copy
import numpy as np
import torch
import random
from .voc_dataloader import _VOCBase
class CustomVOC(_VOCBase):
    """`Pascal VOC <http://host.robots.ox.ac.uk/pascal/VOC/>`_ Segmentation Dataset.

    Args:
        root (string): Root directory of the VOC Dataset.
        year (string, optional): The dataset year, supports years ``"2007"`` to ``"2012"``.
        image_set (string, optional): Select the image_set to use, ``"train"``, ``"trainval"`` or ``"val"``. If
            ``year=="2007"``, can also be ``"test"``.
        download (bool, optional): If true, downloads the dataset from the internet and
            puts it in root directory. If dataset is already downloaded, it is not
            downloaded again.
        transform (callable, optional): A function/transform that takes in an PIL image
            and returns a transformed version. E.g, ``transforms.RandomCrop``
        target_transform (callable, optional): A function/transform that takes in the
            target and transforms it.
        transforms (callable, optional): A function/transform that takes input sample and its target as entry
            and returns a transformed version.
    """
    # NOTE: the docstring above was previously a bare string expression
    # *inside* __init__ (after the super() call), i.e. a no-op that never
    # became CustomVOC.__doc__; it has been moved to the class level.

    # Locations used by the _VOCBase machinery to find split lists/targets.
    _SPLITS_DIR = "Segmentation"
    _TARGET_DIR = "SegmentationClass"
    _TARGET_FILE_EXT = ".png"

    def __init__(self, root, image_set, transform, target_transform):
        super(CustomVOC, self).__init__(root=root,
                                        image_set=image_set,
                                        transform=transform,
                                        target_transform=target_transform)

    def __getitem__(self, index: int) -> Tuple[Any, Any]:
        """
        Args:
            index (int): Index
        Returns:
            tuple: (image, target, img_path) where target is a deep copy of
            the loaded image (NOT a segmentation mask) and img_path is the
            path the image was read from.
        """
        img = Image.open(self.images[index]).convert("RGB")
        target = copy.deepcopy(img)
        img_path = self.images[index]
        # One shared seed re-applied before each transform so any randomness
        # in `transform` and `target_transform` is identical for img/target.
        seed = np.random.randint(2147483647)  # make a seed with numpy generator
        if self.transform is not None:
            random.seed(seed)  # apply this seed to img tranfsorms
            torch.manual_seed(seed)  # needed for torchvision 0.7
            img = self.transform(img)
        if self.target_transform is not None:
            random.seed(seed)  # apply this seed to img tranfsorms
            torch.manual_seed(seed)  # needed for torchvision 0.7
            target = self.target_transform(target)
        return img, target, img_path
d29ad5496f6bd24b23993b78eed47766d1f82d23 | 2,047 | py | Python | plugins/forcesub.py | Hydrayt777/Image-Editor-Bot | 76ec325e8897214b15ccc54a37115c3b2eee95c9 | [
"MIT"
] | 3 | 2021-05-17T19:12:04.000Z | 2021-12-05T11:01:01.000Z | plugins/forcesub.py | Hydrayt777/Image-Editor-Bot | 76ec325e8897214b15ccc54a37115c3b2eee95c9 | [
"MIT"
] | null | null | null | plugins/forcesub.py | Hydrayt777/Image-Editor-Bot | 76ec325e8897214b15ccc54a37115c3b2eee95c9 | [
"MIT"
] | 5 | 2021-06-13T21:46:47.000Z | 2021-12-28T04:57:05.000Z | import logging
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
import os
import time
# the secret configuration specific things
if bool(os.environ.get("WEBHOOK", False)):
from sample_config import Config
else:
from config import Config
# the Strings used for this "thing"
from translation import Translation
from pyrogram.types import ReplyKeyboardMarkup,InlineKeyboardMarkup,InlineKeyboardButton
from pyrogram.errors import UserNotParticipant, UserBannedInChannel
import pyrogram
logging.getLogger("pyrogram").setLevel(logging.WARNING)
from pyrogram import Client, filters
from helper_funcs.chat_base import TRChatBase
from helper_funcs.display_progress import progress_for_pyrogram
from hachoir.metadata import extractMetadata
from hachoir.parser import createParser
# https://stackoverflow.com/a/37631799/4723940
from PIL import Image
from database.database import *
@pyrogram.Client.on_message(pyrogram.filters.command(["photo"]))
async def photo(bot, update):
    """Handle the /photo command: reject banned users, then enforce the
    force-subscribe check against Config.UPDATE_CHANNEL.

    The original block was unparseable: the decorator was followed directly
    by module-level statements (the ``async def`` line was missing) and a
    stray ``then,`` line; ``bot.send_message`` was also never awaited.
    """
    if update.from_user.id in Config.BANNED_USERS:
        # Banned users only ever receive the canned banned-user reply.
        await bot.send_message(
            chat_id=update.chat.id,
            text=Translation.BANNED_USER_TEXT,
            reply_to_message_id=update.message_id
        )
        return
    TRChatBase(update.from_user.id, update.text, "photo")
    update_channel = Config.UPDATE_CHANNEL
    if update_channel:
        try:
            user = await bot.get_chat_member(update_channel, update.chat.id)
            if user.status == "kicked":
                await update.reply_text("🤭 Sorry Dude, You are **B A N N E D 🤣🤣🤣**")
                return
        except UserNotParticipant:
            # Not a member of the updates channel: prompt them to join.
            #await update.reply_text(f"Join @{update_channel} To Use Me")
            await update.reply_text(
                text="**Join My Updates Channel to use ME 😎 🤭**",
                reply_markup=InlineKeyboardMarkup([
                    [ InlineKeyboardButton(text="Join My Updates Channel", url=f"https://t.me/{update_channel}")]
                ])
            )
            return
        except Exception:
            # Best-effort: any other Telegram error yields a generic reply.
            await update.reply_text("Something Wrong. Contact my Support Group")
| 32.492063 | 103 | 0.744504 | 267 | 2,047 | 5.595506 | 0.441948 | 0.052209 | 0.042838 | 0.053548 | 0.032129 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008726 | 0.160234 | 2,047 | 62 | 104 | 33.016129 | 0.856894 | 0.087445 | 0 | 0.08 | 0 | 0 | 0.138486 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.32 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
d29bb2fd2c6a51e6186794b210ee001889bc8ae9 | 848 | py | Python | db_connection.py | Brcinko/pdt_map | 7d26412eddc3e876782b0dfd92cc2a81d37583cd | [
"MIT"
] | null | null | null | db_connection.py | Brcinko/pdt_map | 7d26412eddc3e876782b0dfd92cc2a81d37583cd | [
"MIT"
] | null | null | null | db_connection.py | Brcinko/pdt_map | 7d26412eddc3e876782b0dfd92cc2a81d37583cd | [
"MIT"
] | null | null | null | """
db_connection.py, author: Lukas Klescinec <lukas.klescinec@gmail.com>
FIIT Slovak University of Technology 2016
This file is part of school project on lesson Advanced Databases.
"""
import psycopg2
from settings import HOSTNAME, USERNAME, PASSWORD, DB_NAME
def open_connection():
    """Open and return a psycopg2 connection using the settings constants.

    On failure, re-raises the original psycopg2.DatabaseError.  The previous
    code did ``raise e.pgerror`` -- raising a *string*, which is itself a
    TypeError, and which discarded the real exception and traceback.
    """
    print("Trying to connect to a database.")
    try:
        db_conn = psycopg2.connect(host=HOSTNAME, user=USERNAME, password=PASSWORD, dbname=DB_NAME)
        return db_conn
    except psycopg2.DatabaseError as e:
        print("Connection failed: %s" % e)
        raise
def close_connection(conn):
    """Close the given database connection.

    Re-raises psycopg2.DatabaseError on failure.  The previous code did
    ``raise e.message`` -- raising a string (a TypeError in itself) and
    losing the original traceback.
    """
    print("Closing connection to database.")
    try:
        conn.close()
    except psycopg2.DatabaseError as e:
        print("Closing failed: %s" % e)
        raise
def execute_query(conn, query):
    """Run *query* on *conn* and return all result rows.

    :param conn: an open psycopg2 connection
    :param query: SQL string to execute
    :return: list of result tuples from ``cursor.fetchall()``

    The cursor is always closed, even when execution raises — the original
    version leaked one cursor per call.
    """
    cursor = conn.cursor()
    try:
        cursor.execute(query)
        return cursor.fetchall()
    finally:
        cursor.close()
| 24.228571 | 99 | 0.70283 | 108 | 848 | 5.444444 | 0.555556 | 0.047619 | 0.091837 | 0.098639 | 0.122449 | 0.122449 | 0.122449 | 0 | 0 | 0 | 0 | 0.012066 | 0.21816 | 848 | 34 | 100 | 24.941176 | 0.874811 | 0 | 0 | 0.2 | 0 | 0 | 0.096923 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.1 | 0.1 | null | null | 0.1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
d29d477e792df99677c46cce08497ad31358bc4f | 308 | py | Python | src/lib/system/system.py | cryptosbyte/GRX | 3fee441d32ab93b2f8f49eb47d6fe7c4f23e6321 | [
"Apache-2.0"
] | 1 | 2021-06-20T20:43:08.000Z | 2021-06-20T20:43:08.000Z | src/lib/system/system.py | cryptosbyte/GRX | 3fee441d32ab93b2f8f49eb47d6fe7c4f23e6321 | [
"Apache-2.0"
] | null | null | null | src/lib/system/system.py | cryptosbyte/GRX | 3fee441d32ab93b2f8f49eb47d6fe7c4f23e6321 | [
"Apache-2.0"
] | null | null | null | from os import get_terminal_size, name, system
def clear() -> None:
    """Clear the terminal screen: 'cls' on Windows ('nt'), 'clear' elsewhere."""
    command = 'cls' if name == 'nt' else 'clear'
    system(command)
def terminal_width() -> int:
    """Return the current terminal width in columns."""
    # os.terminal_size is a named tuple; .columns is the same as index [0].
    return get_terminal_size().columns
d2a15313f2547cfc25308f9c39dbe1c3b32f52e8 | 492 | py | Python | Python/owo.py | PushpneetSingh/Hello-world | def0f44737e02fb40063cd347e93e456658e2532 | [
"MIT"
] | 1,428 | 2018-10-03T15:15:17.000Z | 2019-03-31T18:38:36.000Z | Python/owo.py | PushpneetSingh/Hello-world | def0f44737e02fb40063cd347e93e456658e2532 | [
"MIT"
] | 1,162 | 2018-10-03T15:05:49.000Z | 2018-10-18T14:17:52.000Z | Python/owo.py | PushpneetSingh/Hello-world | def0f44737e02fb40063cd347e93e456658e2532 | [
"MIT"
] | 3,909 | 2018-10-03T15:07:19.000Z | 2019-03-31T18:39:08.000Z | import time
print("OwO What's This!!!")
print("Here's my favorite activity in Japoneeese: プログラミング (yes google translate is good)")
# Copypasta text; printed one word at a time below.
furry = "Rawr x3 nuzzles how are you pounces on you you're so warm o3o notices you have a bulge o: someone's happy ;) nuzzles your necky wecky~ murr~ hehehe rubbies your bulgy wolgy you're so big :oooo rubbies more on your bulgy wolgy it doesn't stop growing -///- kisses you and lickies you"
furry = furry.split()
for f in furry:
    time.sleep(0.1)  # brief pause between words for a "typing" effect
    print(f)
| 49.2 | 292 | 0.735772 | 89 | 492 | 4.067416 | 0.719101 | 0.027624 | 0.038674 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009828 | 0.172764 | 492 | 9 | 293 | 54.666667 | 0.879607 | 0 | 0 | 0 | 0 | 0.125 | 0.77439 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.125 | 0 | 0.125 | 0.375 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
d2a59ba317d020f4c532070904c85fbd9ea4db6e | 683 | py | Python | examples/simple.py | cdumay/flask-tat | 94a1cbee2e4be424eefc9009004df819e90c2b32 | [
"Apache-2.0"
] | null | null | null | examples/simple.py | cdumay/flask-tat | 94a1cbee2e4be424eefc9009004df819e90c2b32 | [
"Apache-2.0"
] | null | null | null | examples/simple.py | cdumay/flask-tat | 94a1cbee2e4be424eefc9009004df819e90c2b32 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
.. codeauthor:: Cédric Dumay <cedric.dumay@gmail.com>
"""
import os
from flask import Flask
from flask_tat.engine import TATClient
# Default TAT connection settings; real values may already be provided via
# the environment (setdefault only fills in missing variables).
os.environ.setdefault('TAT_URL', 'http://127.0.0.1')
os.environ.setdefault('TAT_USERNAME', 'test')
os.environ.setdefault('TAT_PASSWORD', 'test')
os.environ.setdefault("TAT_TOPIC", "MyTopic")

# Copy the connection settings into the Flask application config, where
# the flask_tat extension reads them from.
app = Flask(__name__)
app.config.update(dict(
    TAT_URL=os.getenv('TAT_URL'),
    TAT_USERNAME=os.getenv('TAT_USERNAME'),
    TAT_PASSWORD=os.getenv('TAT_PASSWORD'),
))
# Push an application context so the extension can access app.config.
app.app_context().push()

tat_client = TATClient(app)

# Fetch and print up to five messages from the configured topic.
print(tat_client.message_list(
    topic=os.getenv('TAT_TOPIC'),
    limit=5
))
| 21.34375 | 53 | 0.708638 | 99 | 683 | 4.686869 | 0.464646 | 0.077586 | 0.163793 | 0.189655 | 0.112069 | 0 | 0 | 0 | 0 | 0 | 0 | 0.013158 | 0.10981 | 683 | 31 | 54 | 22.032258 | 0.75 | 0.140556 | 0 | 0.105263 | 0 | 0 | 0.192708 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.105263 | 0.157895 | 0 | 0.157895 | 0.052632 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
d2a8bf2cf7dbb16cde4730eed0e7eb82caef2627 | 672 | py | Python | plugins/vsphere/resoto_plugin_vsphere/config.py | someengineering/resoto | ee17313f5376e9797ed305e7fdb62d40139a6608 | [
"Apache-2.0"
] | 126 | 2022-01-13T18:22:03.000Z | 2022-03-31T11:03:14.000Z | plugins/vsphere/resoto_plugin_vsphere/config.py | someengineering/resoto | ee17313f5376e9797ed305e7fdb62d40139a6608 | [
"Apache-2.0"
] | 110 | 2022-01-13T22:27:55.000Z | 2022-03-30T22:26:50.000Z | plugins/vsphere/resoto_plugin_vsphere/config.py | someengineering/resoto | ee17313f5376e9797ed305e7fdb62d40139a6608 | [
"Apache-2.0"
] | 8 | 2022-01-15T10:28:16.000Z | 2022-03-30T16:38:21.000Z | from dataclasses import dataclass, field
from typing import ClassVar, Optional
@dataclass
class VSphereConfig:
    """Connection settings for a vSphere endpoint used by the collector plugin."""

    # Collector kind identifier used to select this config section.
    kind: ClassVar[str] = "vsphere"
    user: Optional[str] = field(default=None, metadata={"description": "User name"})
    password: Optional[str] = field(default=None, metadata={"description": "Password"})
    host: Optional[str] = field(
        default=None, metadata={"description": "Host name/address"}
    )
    port: int = field(default=443, metadata={"description": "TCP port"})
    insecure: bool = field(
        default=True,
        metadata={
            "description": "Allow insecure connection. Do not verify certificates."
        },
    )
| 33.6 | 87 | 0.657738 | 70 | 672 | 6.314286 | 0.5 | 0.135747 | 0.108597 | 0.156109 | 0.312217 | 0.312217 | 0.312217 | 0 | 0 | 0 | 0 | 0.005639 | 0.208333 | 672 | 19 | 88 | 35.368421 | 0.825188 | 0 | 0 | 0 | 0 | 0 | 0.235119 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.058824 | 0.117647 | 0 | 0.529412 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
d2ae576335ce3d8138cbe33d9a81a6f07bea2a8d | 727 | py | Python | pyga/candidate_factory/bit_string.py | Eyjafjallajokull/pyga | b7560aca4349b5973a359eb3629b6c12c0d728b2 | [
"MIT"
] | null | null | null | pyga/candidate_factory/bit_string.py | Eyjafjallajokull/pyga | b7560aca4349b5973a359eb3629b6c12c0d728b2 | [
"MIT"
] | null | null | null | pyga/candidate_factory/bit_string.py | Eyjafjallajokull/pyga | b7560aca4349b5973a359eb3629b6c12c0d728b2 | [
"MIT"
] | null | null | null | from .candidate_factory import CandidateFactory
from ..candidate import Candidate
class BitStringFactory(CandidateFactory):
    """
    Candidate factory producing data represented by fixed-length binary strings.

    :param random: Random number generator
    :param size: int length of strings to generate
    """

    def __init__(self, random, size):
        super().__init__(random)
        self.size = size

    def create_candidate(self):
        """
        Build a Candidate whose data is ``size`` random '0'/'1' characters.

        :return: Candidate
        """
        candidate = Candidate()
        bits = [str(round(self.random.float())) for _ in range(self.size)]
        candidate.data = ''.join(bits)
        return candidate
| 29.08 | 93 | 0.669876 | 79 | 727 | 6.025316 | 0.582278 | 0.113445 | 0.113445 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.240715 | 727 | 24 | 94 | 30.291667 | 0.862319 | 0.34663 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0.2 | 0 | 0.6 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
d2b09a89ed20ce21e305466b8fcc1f7b2cc8ff34 | 5,812 | py | Python | startup/users/30-user-Kang.py | mrakitin/profile_collection-smi | 1eea45a3b886b2c0daeec715ce94f27da24d0ba3 | [
"BSD-3-Clause"
] | null | null | null | startup/users/30-user-Kang.py | mrakitin/profile_collection-smi | 1eea45a3b886b2c0daeec715ce94f27da24d0ba3 | [
"BSD-3-Clause"
] | 13 | 2018-09-25T19:35:08.000Z | 2021-01-15T20:42:26.000Z | startup/users/30-user-Kang.py | mrakitin/profile_collection-smi | 1eea45a3b886b2c0daeec715ce94f27da24d0ba3 | [
"BSD-3-Clause"
] | 3 | 2019-09-06T01:40:59.000Z | 2020-07-01T20:27:39.000Z |
def rotation_saxs(t=1):
    """Grid-scan the sample rotation (prs, -90..90 deg) against the WAXS arc
    for each sample height in ``y_list``, collecting on pil1M and pil300KW.

    :param t: detector exposure time in seconds

    Fix: the assert message previously referenced undefined names
    ``x_list``/``samples``, which raised NameError instead of the message.
    """
    sample = ['AGIB3N_1top', 'AGIB3N_1mid', 'AGIB3N_1cen']  # Change filename
    y_list = [4760, 4810, 4860]  # SmarAct positions in um
    assert len(y_list) == len(sample), \
        f'Number of Y coordinates ({len(y_list)}) is different from number of samples ({len(sample)})'

    # Detectors, motors:
    dets = [pil1M, pil300KW]
    prs_range = [-90, 90, 91]
    waxs_range = [0, 26, 5]  # step of 6.5 degrees

    det_exposure_time(t, t)
    for sam, y in zip(sample, y_list):
        yield from bps.mv(piezo.y, y)  # SmarAct stage
        name_fmt = '{sam}'
        sample_name = name_fmt.format(sam=sam)
        sample_id(user_name='MK', sample_name=sample_name)
        print(f'\n\t=== Sample: {sample_name} ===\n')
        yield from bp.grid_scan(dets, prs, *prs_range, waxs, *waxs_range, 1)

    # Restore defaults.
    sample_id(user_name='test', sample_name='test')
    det_exposure_time(0.3, 0.3)
def rotation_saxs_fast(t=1):
    """Step the WAXS arc and the sample rotation (prs) explicitly, taking one
    count per (waxs, prs) combination at each sample height in ``y_list``.

    :param t: detector exposure time in seconds

    Fix: the assert message previously referenced undefined names
    ``x_list``/``samples``, which raised NameError instead of the message.
    """
    sample = ['AGIB3DR_2fast_top', 'AGIB3DR_2fast_mid', 'AGIB3DR_2fast_cen']  # Change filename
    y_list = [5150, 5230, 5310]  # SmarAct positions in um
    assert len(y_list) == len(sample), \
        f'Number of Y coordinates ({len(y_list)}) is different from number of samples ({len(sample)})'

    # Detectors, motors:
    dets = [pil1M, pil300KW]
    prs_range = np.linspace(-90, 90, 91)
    waxs_range = np.linspace(0, 26, 5)

    det_exposure_time(t, t)
    for sam, y in zip(sample, y_list):
        yield from bps.mv(piezo.y, y)
        for wa in waxs_range:
            yield from bps.mv(waxs, wa)
            for pr in prs_range:
                yield from bps.mv(prs, pr)
                name_fmt = '{sam}_wa{waxs}deg_{prs}deg'
                sample_name = name_fmt.format(sam=sam, waxs='%2.1f' % wa, prs='%3.3d' % pr)
                sample_id(user_name='MK', sample_name=sample_name)
                print(f'\n\t=== Sample: {sample_name} ===\n')
                yield from bp.count(dets, num=1)

    # Restore defaults.
    sample_id(user_name='test', sample_name='test')
    det_exposure_time(0.3, 0.3)
def rotation_saxs_att(t=1):
    """Two-pass measurement: unattenuated SAXS at every rotation angle first,
    then attenuated WAXS maps over the same positions, moving the WAXS arc
    and pil1M detector between arcs.  Attenuators and detector position are
    restored at the end.

    :param t: detector exposure time in seconds

    Fix: the assert message previously referenced undefined names
    ``x_list``/``samples``, which raised NameError instead of the message.
    """
    sample = ['Hopper1_AGIB_AuPd_top', 'Hopper1_AGIB_AuPd_mid', 'Hopper1_AGIB_AuPd_bot']  # Change filename
    y_list = [-9540, -9520, -9500]  # SmarAct positions in um
    assert len(y_list) == len(sample), \
        f'Number of Y coordinates ({len(y_list)}) is different from number of samples ({len(sample)})'

    # Detectors, motors:
    dets0 = [pil1M]
    dets = [pil300KW]
    det_exposure_time(t, t)
    pil_pos_x = [-0.4997, -0.4997 + 4.3, -0.4997 + 4.3, -0.4997]
    pil_pos_y = [-59.9987, -59.9987, -59.9987 + 4.3, -59.9987]
    waxs_po = np.linspace(20.95, 2.95, 4)

    # Pass 1: unattenuated SAXS at every rotation angle.
    for sam, y in zip(sample, y_list):
        yield from bps.mv(piezo.y, y)  # SmarAct
        yield from bps.mv(waxs, 70)
        for angle in range(-90, 91, 1):
            yield from bps.mv(prs, angle)
            name_fmt = '{sam}_phi{angle}deg'
            sample_name = name_fmt.format(sam=sam, angle=angle)
            sample_id(user_name='MK', sample_name=sample_name)
            print(f'\n\t=== Sample: {sample_name} ===\n')
            yield from bp.count(dets0, num=1)

    # Insert attenuators for the WAXS pass.
    yield from bps.mv(att1_5, 'Insert')
    yield from bps.sleep(1)
    yield from bps.mv(att1_6, 'Insert')
    yield from bps.sleep(1)

    # Pass 2: attenuated WAXS at each arc / detector position.
    for sam, y in zip(sample, y_list):
        yield from bps.mv(piezo.y, y)  # SmarAct
        for i, waxs_pos in enumerate(waxs_po):
            yield from bps.mv(waxs, waxs_pos)
            yield from bps.mv(pil1m_pos.x, pil_pos_x[i])
            yield from bps.mv(pil1m_pos.y, pil_pos_y[i])
            for angle in range(-90, 91, 1):
                yield from bps.mv(prs, angle)
                name_fmt = '{sam}_phi{angle}deg_{waxs_pos}deg'
                sample_name = name_fmt.format(sam=sam, angle=angle, waxs_pos=waxs_pos)
                sample_id(user_name='MK', sample_name=sample_name)
                print(f'\n\t=== Sample: {sample_name} ===\n')
                yield from bp.count(dets, num=1)

    # Restore defaults: sample id, exposure, attenuators out, detector home.
    sample_id(user_name='test', sample_name='test')
    det_exposure_time(0.5, 0.5)
    yield from bps.mv(att1_5, 'Retract')
    yield from bps.sleep(1)
    yield from bps.mv(att1_6, 'Retract')
    yield from bps.sleep(1)
    yield from bps.mv(pil1m_pos.x, -0.4997)
    yield from bps.mv(pil1m_pos.y, -59.9987)
| 23.530364 | 134 | 0.53424 | 824 | 5,812 | 3.586165 | 0.162621 | 0.088325 | 0.101523 | 0.099492 | 0.781726 | 0.712352 | 0.689002 | 0.650423 | 0.639594 | 0.639594 | 0 | 0.086238 | 0.33362 | 5,812 | 246 | 135 | 23.626016 | 0.676736 | 0.163971 | 0 | 0.46988 | 0 | 0 | 0.148194 | 0.025322 | 0 | 0 | 0 | 0 | 0.036145 | 1 | 0.036145 | false | 0 | 0 | 0 | 0.036145 | 0.048193 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
d2b29f4a658536546344987ecefc94f777e0365a | 1,203 | py | Python | veriteos/utils.py | veriteos/veriteos-python | 63f59f49a9299847ba16fe37dc6118602c80633a | [
"MIT"
] | null | null | null | veriteos/utils.py | veriteos/veriteos-python | 63f59f49a9299847ba16fe37dc6118602c80633a | [
"MIT"
] | null | null | null | veriteos/utils.py | veriteos/veriteos-python | 63f59f49a9299847ba16fe37dc6118602c80633a | [
"MIT"
] | null | null | null | import uuid
import time
import hashlib
import json
def get_event_metadata():
    """Return a fresh metadata dict: time-based run_id and random event_id."""
    run_id = str(uuid.uuid1())
    event_id = str(uuid.uuid4())
    return {"run_id": run_id, "event_id": event_id}
# python applies pretty-printing defaults when dumping json objects, so we have
# to change the separators to get a bare-bones compact stringifying/dumping
# function that matches other languages' implementations
def generate_md5_hash_from_payload(payload: dict):
    """Return the hex MD5 digest of the compact (separator-free) JSON form of *payload*."""
    canonical = json.dumps(payload, separators=(',', ':')).encode('utf-8')
    return hashlib.md5(canonical).hexdigest()
def generate_sha256_hash_from_payload(payload: dict):
    """Return the hex SHA-256 digest of the compact (separator-free) JSON form of *payload*."""
    canonical = json.dumps(payload, separators=(',', ':')).encode('utf-8')
    return hashlib.sha256(canonical).hexdigest()
def enrich_valid_event(event, version, count):
    """Fill in run/event ids, payload checksums and reporter fields in place.

    Mutates *event* and also returns it for convenience.
    """
    meta = get_event_metadata()
    payload = event['data']['payload']

    event['pipeline']['run_id'] = meta['run_id']
    event['event']['id'] = meta['event_id']
    event['data']['checksum_md5'] = generate_md5_hash_from_payload(payload)
    event['data']['checksum_sha256'] = generate_sha256_hash_from_payload(payload)

    reporter = event['reporter']
    reporter['version'] = version
    reporter['sequence'] = count
    reporter['timestamp'] = round(time.time())
    return event
| 32.513514 | 112 | 0.699086 | 153 | 1,203 | 5.30719 | 0.405229 | 0.039409 | 0.073892 | 0.046798 | 0.364532 | 0.302956 | 0.302956 | 0.118227 | 0.118227 | 0 | 0 | 0.01938 | 0.142145 | 1,203 | 36 | 113 | 33.416667 | 0.767442 | 0.151288 | 0 | 0 | 0 | 0 | 0.165029 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.173913 | false | 0 | 0.173913 | 0.130435 | 0.521739 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 2 |
d2c54878e4674bf9660909b66167550e662a5180 | 1,041 | py | Python | src/Command/server/AirTime.py | hxxyy/air-conditioning-system | 10d437b8e94301776ab415c53580578a3c2bc85b | [
"MIT"
] | 2 | 2018-03-19T05:44:29.000Z | 2018-04-18T14:04:47.000Z | src/Command/server/AirTime.py | hxxyy/air-conditioning-system | 10d437b8e94301776ab415c53580578a3c2bc85b | [
"MIT"
] | 6 | 2018-03-23T11:50:02.000Z | 2018-05-26T08:52:46.000Z | src/Command/server/AirTime.py | hxxyy/air-conditioning-system | 10d437b8e94301776ab415c53580578a3c2bc85b | [
"MIT"
] | null | null | null | from time import strftime, localtime
def NowTime():
    """Current local time as a 10-digit MMDDHHMMSS string."""
    # strftime defaults to localtime() when no time tuple is given.
    return strftime("%m%d%H%M%S")
def TimeDiff(Timestring1, Timestring2):
    """Return Timestring1 - Timestring2 in seconds.

    Both arguments are MMDDHHMMSS strings; months are counted as 30 days.
    """
    # Field slices and their weights in seconds: month(30d), day, hour, minute, second.
    fields = ((0, 2, 2592000), (2, 4, 86400), (4, 6, 3600), (6, 8, 60), (8, 10, 1))
    total = 0
    for start, stop, weight in fields:
        total += (int(Timestring1[start:stop]) - int(Timestring2[start:stop])) * weight
    return total
def dayegotime(Timestring1, num):
    """Return the MMDDHHMMSS timestamp *num* days earlier.

    Only the day field is adjusted — month rollover is not handled, matching
    the original behavior.  Fix: the day is now zero-padded so the result
    keeps the fixed 10-character layout (previously ``str(d - num)`` produced
    a 9-character string for single-digit days).
    """
    day = int(Timestring1[2:4]) - num
    return Timestring1[0:2] + str(day).zfill(2) + Timestring1[4:10]
def NowMonth():
    """Current local month as a zero-padded 2-digit string."""
    return strftime("%m")
def NowDay():
    """Current local day of month as a zero-padded 2-digit string."""
    return strftime("%d")
def NowHour():
    """Current local hour (00-23) as a zero-padded 2-digit string."""
    return strftime("%H")
def NowMinute():
    """Current local minute (00-59) as a zero-padded 2-digit string."""
    return strftime("%M")
def NowSecond():
    """Current local second as a zero-padded 2-digit string (may be 60/61 on leap seconds)."""
    return strftime("%S")
if __name__ == '__main__':  # not executed when imported as a module
    print(NowTime())
    print(NowHour())
    print(TimeDiff('0328142813', '0328151214'))
| 27.394737 | 62 | 0.630163 | 139 | 1,041 | 4.661871 | 0.309353 | 0.111111 | 0.069444 | 0.049383 | 0.135802 | 0 | 0 | 0 | 0 | 0 | 0 | 0.099291 | 0.18732 | 1,041 | 37 | 63 | 28.135135 | 0.666667 | 0.019212 | 0 | 0 | 0 | 0 | 0.047151 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.285714 | false | 0 | 0.035714 | 0.214286 | 0.607143 | 0.107143 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
d2c71d64db696bf8987f691b05d588a581ee2454 | 3,796 | py | Python | modules/dbnd/test_dbnd/scenarios/pipelines/pipelines_with_errors.py | ipattarapong/dbnd | 7bd65621c46c73e078eb628f994127ad4c7dbd1a | [
"Apache-2.0"
] | 224 | 2020-01-02T10:46:37.000Z | 2022-03-02T13:54:08.000Z | modules/dbnd/test_dbnd/scenarios/pipelines/pipelines_with_errors.py | ipattarapong/dbnd | 7bd65621c46c73e078eb628f994127ad4c7dbd1a | [
"Apache-2.0"
] | 16 | 2020-03-11T09:37:58.000Z | 2022-01-26T10:22:08.000Z | modules/dbnd/test_dbnd/scenarios/pipelines/pipelines_with_errors.py | ipattarapong/dbnd | 7bd65621c46c73e078eb628f994127ad4c7dbd1a | [
"Apache-2.0"
] | 24 | 2020-03-24T13:53:50.000Z | 2022-03-22T11:55:18.000Z | import logging
from time import sleep
from dbnd import PipelineTask, band, output, parameter, pipeline, task
from dbnd.tasks.basics import SimplestTask
from dbnd.tasks.basics.simplest import SimpleTask
from dbnd_test_scenarios.test_common.task.factories import TTask
from targets import target
def raise_failure(failure):
    """Trigger one of the canned failure modes used by the error-scenario pipelines.

    :param failure: 'missing_params' returns a task missing required params,
        'read_error' reads a non-existent target, anything else raises.
    """
    if failure == "missing_params":
        return TTaskMissingParamsMultiple()
    elif failure == "read_error":
        return target("not_exists").read()
    else:
        raise Exception("just an error")
def raise_2(failure):
    # One extra call level above raise_failure (exercises nested tracebacks).
    raise_failure(failure)
def raise_3(failure):
    # Two extra call levels above raise_failure (exercises nested tracebacks).
    raise_2(failure)
class ETaskFails(SimplestTask):
    """Scenario task whose run() always fails with a simulated user TypeError."""

    def run(self):
        raise TypeError("Some user error")
class EPipeWithTaskFails(PipelineTask):
    """Pipeline whose band wires in a task that fails at run time."""

    def band(self):
        return ETaskFails()
class ENoAssignedOutput(PipelineTask):
    """Error scenario: outputs are declared but band() leaves t2 and t_pipeline unset."""

    t1 = output
    t2 = output
    t_pipeline = output

    def band(self):
        self.t1 = SimplestTask()
        # t2 is missing (deliberately, to trigger the missing-output error)
        # self.t2 = SimplestTask()
class EWrongOutputAssignment(PipelineTask):
    """Error scenario: band() assigns a task *class* instead of an instance/output."""

    t1 = output

    def band(self):
        # we should assign Task instances only or their outputs
        self.t1 = ENoAssignedOutput
class EBandWithError(PipelineTask):
    """Error scenario: band() itself raises before wiring any outputs."""

    t1 = output
    t2 = output

    def band(self):
        raise Exception("User exception in band method")
@pipeline
def e_band_raise():
    # Error scenario: the pipeline body raises during band evaluation.
    raise Exception("User exception in band method")
@task
def e_task_fails():
    # Error scenario: failure through a 3-deep call chain (traceback depth test).
    raise_3(None)
    # raise Exception("User exception in band method")
@pipeline
def e_wrong_task_constructor():
    # Error scenario: pass a parameter name the task does not declare.
    return SimplestTask(not_existing_param=1)
@task
def e_task_with_kwargs(a=2, **kwargs):
    # (int, **Any) -> int
    # Logs any extra keyword arguments and returns ``a`` unchanged.
    logging.info("KWARGS: %s", kwargs)
    return a
@pipeline
def e_task_with_kwargs_pipeline():
    # Invoke the kwargs task with an extra keyword argument scenario.
    return e_task_with_kwargs(10, kwarg=1)
class TErrorRunTask(TTask):
    """Scenario task whose run() always fails with a TypeError."""

    def run(self):
        raise TypeError("Some user error")
class TError2RunTask(TTask):
    """Scenario task whose run() always fails with a generic Exception."""

    def run(self):
        raise Exception("Some user error")
class TLongTimeRunning(TTask):
    """Task that optionally sleeps for ``sleep`` seconds, runs normally, then always fails."""

    # Seconds to sleep before running; 0 means no delay.
    sleep = parameter.value(default=0)

    def run(self):
        if self.sleep:
            sleep(self.sleep)
        super(TLongTimeRunning, self).run()
        # Deliberate failure *after* the real work completes.
        raise Exception("Some user error")
class TNestedPipeline(PipelineTask):
    """Pipeline wrapping TLongTimeRunning, exposing its output as long_time_run."""

    long_time_run = output

    def band(self):
        self.long_time_run = TLongTimeRunning().simplest_output
class TPipeWithErrors(PipelineTask):
    """Pipeline combining two failing tasks and a nested pipeline that also fails."""

    t1 = output
    t2 = output
    t_pipeline = output

    def band(self):
        self.t1 = TErrorRunTask()
        self.t2 = TError2RunTask()
        self.t_pipeline = TNestedPipeline().long_time_run
class TPipelineWrongAssignment(PipelineTask):
    """Error scenario: output assigned the PipelineTask *class* instead of a task result."""

    some_output = output

    def band(self):
        self.some_output = PipelineTask
class TTaskMissingParamsMultiple(TTask):
    """Task declaring three required int parameters; instantiating without them fails."""

    p1 = parameter[int]
    p2 = parameter[int]
    p3 = parameter[int]
@band
def pipe_bad_band(failure="missing_params"):
    """Band that fails in a selectable way: missing required params, a target
    read error, a run-time task error, or a plain exception."""
    if failure == "missing_params":
        return TTaskMissingParamsMultiple()
    elif failure == "read_error":
        return target("not_exists").read()
    elif failure == "task_run":
        return TErrorRunTask()
    else:
        raise Exception("just an error")
@band
def pipe_bad_band2():
    # One band level above pipe_bad_band (nested-band error reporting).
    return pipe_bad_band()
@band
def pipe_bad_band3():
    # Two band levels above pipe_bad_band (nested-band error reporting).
    return pipe_bad_band2()
class TaskBadBand1(PipelineTask):
    """Pipeline whose band() triggers the failure mode named by its parameter."""

    failure = parameter[str]

    def band(self):
        raise_failure(self.failure)
class TaskBadBand2(PipelineTask):
    """One nesting level above TaskBadBand1.

    NOTE(review): TaskBadBand1 declares a required 'failure' parameter but is
    constructed here without one — presumably the missing-param error is part
    of the scenario; verify.
    """

    def band(self):
        return TaskBadBand1()
class TaskBadBand3(PipelineTask):
    """Two nesting levels above TaskBadBand1 (nested pipeline error reporting)."""

    def band(self):
        return TaskBadBand2()
@pipeline
def pipe_with_task_with_params():
    # Baseline (non-failing) pipeline returning a simple parameterized task.
    return SimpleTask()
@pipeline
def p2_with_task_with_params():
    # Wraps pipe_with_task_with_params one pipeline level deeper.
    return pipe_with_task_with_params()
| 19.668394 | 70 | 0.686249 | 452 | 3,796 | 5.606195 | 0.236726 | 0.027624 | 0.04341 | 0.040253 | 0.375691 | 0.262431 | 0.214286 | 0.198895 | 0.198895 | 0.165746 | 0 | 0.012487 | 0.219442 | 3,796 | 192 | 71 | 19.770833 | 0.842727 | 0.042413 | 0 | 0.425 | 0 | 0 | 0.067236 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.225 | false | 0 | 0.058333 | 0.075 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
d2c86d0b6b0b995a3a1514375dd1b5db44618411 | 2,483 | py | Python | pyxrd/calculations/goniometer.py | PyXRD/pyxrd | 26bacdf64f3153fa74b8caa62e219b76d91a55c1 | [
"BSD-2-Clause"
] | 27 | 2018-06-15T15:28:18.000Z | 2022-03-10T12:23:50.000Z | pyxrd/calculations/goniometer.py | PyXRD/pyxrd | 26bacdf64f3153fa74b8caa62e219b76d91a55c1 | [
"BSD-2-Clause"
] | 22 | 2018-06-14T08:29:16.000Z | 2021-07-05T13:33:44.000Z | pyxrd/calculations/goniometer.py | PyXRD/pyxrd | 26bacdf64f3153fa74b8caa62e219b76d91a55c1 | [
"BSD-2-Clause"
] | 8 | 2019-04-13T13:03:51.000Z | 2021-06-19T09:29:11.000Z | # coding=UTF-8
# ex:ts=4:sw=4:et=on
# Copyright (c) 2013, Mathijs Dumon
# All rights reserved.
# Complete license can be found in the LICENSE file.
import numpy as np
from scipy.special import erf
from math import sqrt
from .math_tools import sqrt2pi, sqrt8
def get_S(soller1, soller2):
    """Return (S, soller1*soller2), where S is the quadrature sum of the
    soller-slit half-widths."""
    half1 = soller1 * 0.5
    half2 = soller2 * 0.5
    return sqrt(half1 ** 2 + half2 ** 2), soller1 * soller2
def get_T(range_theta, sigma_star, soller1, soller2):
    """Axial-divergence term T for the given theta range.

    ``sigma_star`` is clamped to a tiny positive value to avoid dividing by zero.
    """
    sigma_star = float(max(sigma_star, 1e-18))
    S, _ = get_S(soller1, soller2)
    sin_theta = np.sin(range_theta)
    Q = S / (sqrt8 * sin_theta * sigma_star)
    first = erf(Q) * sqrt2pi / (2.0 * sigma_star * S)
    second = 2.0 * sin_theta * (1.0 - np.exp(-(Q ** 2.0))) / (S ** 2.0)
    return first - second
def get_lorentz_polarisation_factor(range_theta, sigma_star, soller1, soller2, mcr_2theta):
    """
    Lorentz-polarisation factor for the given sigma-star value, soller slits,
    monochromator 2-theta angle (degrees) and theta range (radians).
    """
    axial = get_T(range_theta, sigma_star, soller1, soller2)
    mono_pol = np.cos(np.radians(mcr_2theta)) ** 2
    return axial * (1.0 + mono_pol * (np.cos(2.0 * range_theta) ** 2)) / np.sin(range_theta)
def get_fixed_to_ads_correction_range(range_theta, goniometer):
    """Fixed-slits to automatic-divergence-slits intensity correction for the
    given theta range.  Note: the *goniometer* argument is currently unused."""
    return np.sin(range_theta)
def get_nm_from_t(theta, wavelength=0.154056, zero_for_inf=False):
    """
    Convert theta angles (scalar or np array, degrees) to nanometer spacings
    for the given wavelength; delegates to get_nm_from_2t.
    """
    twotheta = 2.0 * theta
    return get_nm_from_2t(twotheta, wavelength=wavelength, zero_for_inf=zero_for_inf)
def get_nm_from_2t(twotheta, wavelength=0.154056, zero_for_inf=False):
    """
    Convert a 2-theta angle (degrees) to a nanometer spacing via Bragg's law.
    A 2-theta of zero maps to 1e16 (effectively infinite), or to 0 when
    *zero_for_inf* is set.
    """
    if twotheta == 0:
        return 0. if zero_for_inf else 1e16
    half_angle = np.radians(twotheta / 2.0)
    return wavelength / (2.0 * np.sin(half_angle))
def get_t_from_nm(nm, wavelength=0.154056):
    """
    Convert nanometer spacings (scalar or np array) to theta angles (degrees)
    for the given wavelength; half of the corresponding 2-theta angle.
    """
    twotheta = get_2t_from_nm(nm, wavelength=wavelength)
    return twotheta / 2
def get_2t_from_nm(nm, wavelength=0.154056):
    """
    Convert a nanometer spacing to a 2-theta angle (degrees) via Bragg's law.
    The sine argument is clamped to [-1, 1]; a spacing of zero yields 0.0.
    """
    if nm == 0:
        return 0.0
    ratio = wavelength / (2.0 * nm)
    clamped = min(1.0, max(-1.0, ratio))
    return np.degrees(np.arcsin(clamped)) * 2.0
| 34.013699 | 112 | 0.667741 | 390 | 2,483 | 4.082051 | 0.271795 | 0.012563 | 0.031407 | 0.037688 | 0.400754 | 0.400754 | 0.28392 | 0.28392 | 0.237437 | 0.237437 | 0 | 0.058247 | 0.218687 | 2,483 | 72 | 113 | 34.486111 | 0.762371 | 0.273862 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.235294 | false | 0 | 0.117647 | 0.029412 | 0.617647 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
d2d99748251394ad5b95a8977a7f04ae00016e3f | 3,960 | py | Python | testing/test/list/test_extended_list.py | Xamaneone/Python-OOP | 7514cdc92bb4f7adf27666516739cbf42a35453c | [
"MIT"
] | null | null | null | testing/test/list/test_extended_list.py | Xamaneone/Python-OOP | 7514cdc92bb4f7adf27666516739cbf42a35453c | [
"MIT"
] | null | null | null | testing/test/list/test_extended_list.py | Xamaneone/Python-OOP | 7514cdc92bb4f7adf27666516739cbf42a35453c | [
"MIT"
] | null | null | null | import unittest
from extended_list import IntegerList
class IntegerListTests(unittest.TestCase):
    """Unit tests for IntegerList: add/remove, construction, get/insert,
    get_biggest and get_index behavior including invalid-index errors."""

    def test_integer_list_add__when_int__expect_to_add_it(self):
        integer_list = IntegerList()
        internal_list = integer_list.add(1)
        self.assertEqual([1], internal_list)

    def test_integer_list_add__when_str__expect_exception(self):
        integer_list = IntegerList()
        with self.assertRaises(ValueError):
            integer_list.add('as')

    def test_integer_list_remove_index__when_valid_index__expect_to_remove_and_return_it(self):
        value_to_be_removed = 3
        integer_list = IntegerList(1, 2, value_to_be_removed, 4)
        result = integer_list.remove_index(2)
        self.assertEqual(value_to_be_removed, result)
        self.assertListEqual([1, 2, 4], integer_list.get_data())

    def test_integer_list_remove_index__when_invalid_negative_index__expect_exception(self):
        integer_list = IntegerList(1, 2, 3)
        index = -4
        with self.assertRaises(IndexError):
            integer_list.remove_index(index)

    def test_integer_list_remove_index__when_invalid_positive_index__expect_exception(self):
        integer_list = IntegerList(1, 2, 3)
        index = 3
        with self.assertRaises(IndexError):
            integer_list.remove_index(index)

    def test_integer_list_init__when_integers__expect_to_create_it(self):
        # NOTE(review): a single tuple is passed here, yet a flat list is
        # expected back — presumably IntegerList flattens iterables; verify.
        numbers = 1, 2, 3
        list_of_numbers = [1, 2, 3]
        integer_list = IntegerList(numbers)
        self.assertEqual(list_of_numbers, integer_list.get_data())

    def test_integer_list_init__when_not_only_integers__expect_exception(self):
        with self.assertRaises(Exception):
            IntegerList(1, 2, 'as')

    def test_integer_list_get__when_valid_index__expect_to_return_it(self):
        integer_list = IntegerList(1, 2, 3, 4)
        actual = integer_list.get(3)
        self.assertEqual(4, actual)

    def test_integer_list_get__when_invalid_negative_index__expect_exception(self):
        integer_list = IntegerList(1, 2, 3, 4)
        with self.assertRaises(IndexError):
            integer_list.get(-5)

    def test_integer_list_get__when_invalid_positive_index__expect_exception(self):
        integer_list = IntegerList(1, 2, 3, 4)
        with self.assertRaises(IndexError):
            integer_list.get(4)

    def test_integer_list_insert__when_valid_index_and_value__expect_to_insert_it(self):
        integer_list = IntegerList(1, 2, 3, 5)
        integer_list.insert(3, 4)
        self.assertEqual([1, 2, 3, 4, 5], integer_list.get_data())

    def test_integer_list_insert__when_invalid_negative_index__expect_exception(self):
        integer_list = IntegerList(1, 2, 3, 5)
        with self.assertRaises(IndexError):
            integer_list.insert(-13, 4)

    def test_integer_list_insert__when_invalid_positive_index__expect_exception(self):
        integer_list = IntegerList(1, 2, 3)
        with self.assertRaises(IndexError):
            integer_list.insert(4, 4)

    def test_integer_list_insert__when_value_is_str__expect_exception(self):
        # NOTE(review): IndexError for a non-int *value* looks odd (add()
        # raises ValueError) — presumably matches insert()'s implementation; verify.
        integer_list = IntegerList(1, 2, 3)
        with self.assertRaises(IndexError):
            integer_list.insert(1, "I am string")

    def test_integer_list_get_biggest__expect_to_return_the_biggest(self):
        biggest = 17
        integer_list = IntegerList(1, 2, biggest, 3, 4)
        actual = integer_list.get_biggest()
        self.assertEqual(biggest, actual)

    def test_integer_list_get_index__when_value_in_list__expect_to_return_the_index(self):
        integer_list = IntegerList(1, 2, 3, 4)
        actual = integer_list.get_index(2)
        self.assertEqual(1, actual)

    def test_integer_list_get_index__when_value_not_in_list__expect_exception(self):
        integer_list = IntegerList(1, 2, 3, 4)
        with self.assertRaises(IndexError):
            integer_list.get_index(30)
if __name__ == '__main__':  # allow running the suite directly, outside a test runner
    unittest.main()
| 37.358491 | 95 | 0.713384 | 530 | 3,960 | 4.826415 | 0.132075 | 0.219312 | 0.093041 | 0.119625 | 0.683346 | 0.629789 | 0.583268 | 0.481626 | 0.417123 | 0.385066 | 0 | 0.027672 | 0.206061 | 3,960 | 105 | 96 | 37.714286 | 0.785941 | 0 | 0 | 0.294872 | 0 | 0 | 0.005808 | 0 | 0 | 0 | 0 | 0 | 0.230769 | 1 | 0.217949 | false | 0 | 0.025641 | 0 | 0.25641 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
d2e272e377c7dde4446da42ae9fee4dc5d85b11b | 247 | py | Python | component/handler.py | garlfin/entity-component-system | bc0cdc0ac22fbb4fd4f1beb54a1db8cec5c35cb1 | [
"MIT"
] | null | null | null | component/handler.py | garlfin/entity-component-system | bc0cdc0ac22fbb4fd4f1beb54a1db8cec5c35cb1 | [
"MIT"
] | null | null | null | component/handler.py | garlfin/entity-component-system | bc0cdc0ac22fbb4fd4f1beb54a1db8cec5c35cb1 | [
"MIT"
] | null | null | null | from enum import Enum
from component import transform, name
class ComponentTypes(Enum):
    """Enumerates the component kinds known to the entity-component system."""
    transform = 0
    name = 1
# Maps each component type to the class used to instantiate that component.
# NOTE(review): 'nameComponent' is lower-camel unlike 'TransformComponent' —
# presumably it matches the class name in component/name.py; verify.
ComponentDict = {
    ComponentTypes.transform: transform.TransformComponent,
    ComponentTypes.name: name.nameComponent
}
| 17.642857 | 59 | 0.753036 | 25 | 247 | 7.44 | 0.52 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009901 | 0.182186 | 247 | 13 | 60 | 19 | 0.910891 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.222222 | 0 | 0.555556 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
d2e82cdd820929c916ba43b59d4f07c37a9ba697 | 6,130 | py | Python | mgmtsystem/openstack_infra.py | jkandasa/wrapanapi | ccd8ca44d3a18060f789713cfc62674aeccf142e | [
"MIT"
] | null | null | null | mgmtsystem/openstack_infra.py | jkandasa/wrapanapi | ccd8ca44d3a18060f789713cfc62674aeccf142e | [
"MIT"
] | null | null | null | mgmtsystem/openstack_infra.py | jkandasa/wrapanapi | ccd8ca44d3a18060f789713cfc62674aeccf142e | [
"MIT"
] | null | null | null | # coding: utf-8
from keystoneclient.v2_0 import client as oskclient
from novaclient import client as osclient
from novaclient.client import HTTPClient
from requests.exceptions import Timeout
from base import MgmtSystemAPIBase
# TODO The following monkeypatch nonsense is criminal, and would be
# greatly simplified if openstack made it easier to specify a custom
# client class. This is a trivial PR that they're likely to accept.
# Note: This same mechanism may be required for keystone and cinder
# clients, but hopefully won't be.
# monkeypatch method to add retry support to openstack
def _request_timeout_handler(self, url, method, retry_count=0, **kwargs):
    """Retry-aware replacement for ``HTTPClient.request``.

    Delegates to the original (unbound) ``HTTPClient.request`` and, when a
    requests ``Timeout`` is raised, re-enters this patched method with an
    incremented ``retry_count``. After three retries the ``Timeout`` is
    logged and re-raised.
    """
    try:
        # Hand the actual work to the unpatched request implementation.
        return HTTPClient.request(self, url, method, **kwargs)
    except Timeout:
        if retry_count < 3:
            retry_count += 1
            self._cfme_logger.error('nova request timed out; retry {}'.format(retry_count))
            # Recurse through the patched method, which accepts retry_count.
            return self.request(url, method, retry_count=retry_count, **kwargs)
        self._cfme_logger.error('nova request timed out after {} retries'.format(retry_count))
        raise
class OpenstackInfraSystem(MgmtSystemAPIBase):
    """Openstack Infrastructure management system.

    Wraps the nova (compute) and keystone (identity) clients for an
    OpenStack infrastructure provider. Only listing of templates,
    networks and hosts is currently implemented; the VM lifecycle
    operations all raise ``NotImplementedError``.

    Required keyword arguments: ``tenant``, ``username``, ``password``,
    ``auth_url``.
    """

    # Provider statistics available for comparison, keyed by stat name.
    _stats_available = {
        'num_template': lambda self: len(self.list_template()),
        'num_host': lambda self: len(self.list_host()),
    }

    # Generic power-state name -> tuple of matching nova status strings.
    states = {
        'running': ('ACTIVE',),
        'stopped': ('SHUTOFF',),
        'suspended': ('SUSPENDED',),
    }

    can_suspend = True

    def __init__(self, **kwargs):
        super(OpenstackInfraSystem, self).__init__(kwargs)
        self.tenant = kwargs['tenant']
        self.username = kwargs['username']
        self.password = kwargs['password']
        self.auth_url = kwargs['auth_url']
        # Clients are created lazily by the api/kapi properties below.
        self._api = None
        self._kapi = None
        self._capi = None

    @property
    def api(self):
        """Lazily-constructed nova (compute) client with retry support."""
        if not self._api:
            self._api = osclient.Client('2',
                                        self.username,
                                        self.password,
                                        self.tenant,
                                        self.auth_url,
                                        service_type="compute",
                                        insecure=True,
                                        timeout=30)
            # replace the client request method with our version that
            # can handle timeouts; uses explicit binding (versus
            # replacing the method directly on the HTTPClient class)
            # so we can still call out to HTTPClient's original request
            # method in the timeout handler method
            self._api.client._cfme_logger = self.logger
            self._api.client.request = _request_timeout_handler.__get__(
                self._api.client, HTTPClient)
        return self._api

    @property
    def kapi(self):
        """Lazily-constructed keystone (identity) client."""
        if not self._kapi:
            self._kapi = oskclient.Client(username=self.username,
                                          password=self.password,
                                          tenant_name=self.tenant,
                                          auth_url=self.auth_url,
                                          insecure=True)
        return self._kapi

    @property
    def nodes(self):
        """All servers known to the compute API (infra hosts)."""
        return self.api.servers.list()

    @property
    def images(self):
        """All images known to the compute API (templates)."""
        return self.api.images.list()

    @property
    def networks(self):
        """All networks known to the compute API."""
        return self.api.networks.list()

    def start_vm(self, vm_name):
        raise NotImplementedError('start_vm not implemented.')

    def wait_vm_running(self, vm_name, num_sec):
        raise NotImplementedError('wait_vm_running not implemented.')

    def stop_vm(self, vm_name):
        raise NotImplementedError('stop_vm not implemented.')

    def wait_vm_stopped(self, vm_name, num_sec):
        raise NotImplementedError('wait_vm_stopped not implemented.')

    def create_vm(self, vm_name):
        raise NotImplementedError('create_vm not implemented.')

    def delete_vm(self, vm_name):
        raise NotImplementedError('delete_vm not implemented.')

    def restart_vm(self, vm_name):
        raise NotImplementedError('restart_vm not implemented.')

    def vm_status(self, vm_name):
        raise NotImplementedError('vm_status not implemented.')

    def is_vm_running(self, vm_name):
        raise NotImplementedError('is_vm_running not implemented.')

    def is_vm_stopped(self, vm_name):
        raise NotImplementedError('is_vm_stopped not implemented.')

    def is_vm_suspended(self, vm_name):
        raise NotImplementedError('is_vm_suspended not implemented.')

    def suspend_vm(self, vm_name):
        # Fixed: message previously said 'restart_vm' (copy-paste error).
        raise NotImplementedError('suspend_vm not implemented.')

    def wait_vm_suspended(self, vm_name, num_sec):
        raise NotImplementedError('wait_vm_suspended not implemented.')

    def list_vm(self, **kwargs):
        raise NotImplementedError('list_vm not implemented.')

    def list_template(self):
        """Return the names of all images (templates)."""
        return [image.name for image in self.images]

    def list_flavor(self):
        raise NotImplementedError('list_flavor not implemented.')

    def list_network(self):
        """Return the names of all networks."""
        return [network.name for network in self.networks]

    def list_host(self):
        """Return the names of all servers (infra hosts)."""
        return [node.name for node in self.nodes]

    def info(self):
        raise NotImplementedError('info not implemented.')

    def disconnect(self):
        # Nothing to tear down; clients hold no persistent connection here.
        pass

    def clone_vm(self, source_name, vm_name):
        raise NotImplementedError()

    def does_vm_exist(self, name):
        raise NotImplementedError()

    def deploy_template(self, template, *args, **kwargs):
        raise NotImplementedError()

    def current_ip_address(self, vm_name):
        raise NotImplementedError()

    def get_ip_address(self, vm_name):
        # Fixed: removed a stray empty-string docstring ("") that served no purpose.
        raise NotImplementedError()

    def remove_host_from_cluster(self, hostname):
        raise NotImplementedError()
# TODO
| 33.135135 | 98 | 0.627406 | 696 | 6,130 | 5.331897 | 0.277299 | 0.14228 | 0.073296 | 0.105093 | 0.289949 | 0.200485 | 0.146591 | 0.115872 | 0.070601 | 0.033414 | 0 | 0.002067 | 0.289723 | 6,130 | 184 | 99 | 33.315217 | 0.850253 | 0.129364 | 0 | 0.106557 | 0 | 0 | 0.116428 | 0 | 0 | 0 | 0 | 0.016304 | 0 | 1 | 0.270492 | false | 0.032787 | 0.040984 | 0.04918 | 0.42623 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.