hexsha: string
size: int64
ext: string
lang: string
max_stars_repo_path: string
max_stars_repo_name: string
max_stars_repo_head_hexsha: string
max_stars_repo_licenses: list
max_stars_count: int64
max_stars_repo_stars_event_min_datetime: string
max_stars_repo_stars_event_max_datetime: string
max_issues_repo_path: string
max_issues_repo_name: string
max_issues_repo_head_hexsha: string
max_issues_repo_licenses: list
max_issues_count: int64
max_issues_repo_issues_event_min_datetime: string
max_issues_repo_issues_event_max_datetime: string
max_forks_repo_path: string
max_forks_repo_name: string
max_forks_repo_head_hexsha: string
max_forks_repo_licenses: list
max_forks_count: int64
max_forks_repo_forks_event_min_datetime: string
max_forks_repo_forks_event_max_datetime: string
content: string
avg_line_length: float64
max_line_length: int64
alphanum_fraction: float64
qsc_code_num_words_quality_signal: int64
qsc_code_num_chars_quality_signal: float64
qsc_code_mean_word_length_quality_signal: float64
qsc_code_frac_words_unique_quality_signal: float64
qsc_code_frac_chars_top_2grams_quality_signal: float64
qsc_code_frac_chars_top_3grams_quality_signal: float64
qsc_code_frac_chars_top_4grams_quality_signal: float64
qsc_code_frac_chars_dupe_5grams_quality_signal: float64
qsc_code_frac_chars_dupe_6grams_quality_signal: float64
qsc_code_frac_chars_dupe_7grams_quality_signal: float64
qsc_code_frac_chars_dupe_8grams_quality_signal: float64
qsc_code_frac_chars_dupe_9grams_quality_signal: float64
qsc_code_frac_chars_dupe_10grams_quality_signal: float64
qsc_code_frac_chars_replacement_symbols_quality_signal: float64
qsc_code_frac_chars_digital_quality_signal: float64
qsc_code_frac_chars_whitespace_quality_signal: float64
qsc_code_size_file_byte_quality_signal: float64
qsc_code_num_lines_quality_signal: float64
qsc_code_num_chars_line_max_quality_signal: float64
qsc_code_num_chars_line_mean_quality_signal: float64
qsc_code_frac_chars_alphabet_quality_signal: float64
qsc_code_frac_chars_comments_quality_signal: float64
qsc_code_cate_xml_start_quality_signal: float64
qsc_code_frac_lines_dupe_lines_quality_signal: float64
qsc_code_cate_autogen_quality_signal: float64
qsc_code_frac_lines_long_string_quality_signal: float64
qsc_code_frac_chars_string_length_quality_signal: float64
qsc_code_frac_chars_long_word_length_quality_signal: float64
qsc_code_frac_lines_string_concat_quality_signal: float64
qsc_code_cate_encoded_data_quality_signal: float64
qsc_code_frac_chars_hex_words_quality_signal: float64
qsc_code_frac_lines_prompt_comments_quality_signal: float64
qsc_code_frac_lines_assert_quality_signal: float64
qsc_codepython_cate_ast_quality_signal: float64
qsc_codepython_frac_lines_func_ratio_quality_signal: float64
qsc_codepython_cate_var_zero_quality_signal: bool
qsc_codepython_frac_lines_pass_quality_signal: float64
qsc_codepython_frac_lines_import_quality_signal: float64
qsc_codepython_frac_lines_simplefunc_quality_signal: float64
qsc_codepython_score_lines_no_logic_quality_signal: float64
qsc_codepython_frac_lines_print_quality_signal: float64
qsc_code_num_words: int64
qsc_code_num_chars: int64
qsc_code_mean_word_length: int64
qsc_code_frac_words_unique: null
qsc_code_frac_chars_top_2grams: int64
qsc_code_frac_chars_top_3grams: int64
qsc_code_frac_chars_top_4grams: int64
qsc_code_frac_chars_dupe_5grams: int64
qsc_code_frac_chars_dupe_6grams: int64
qsc_code_frac_chars_dupe_7grams: int64
qsc_code_frac_chars_dupe_8grams: int64
qsc_code_frac_chars_dupe_9grams: int64
qsc_code_frac_chars_dupe_10grams: int64
qsc_code_frac_chars_replacement_symbols: int64
qsc_code_frac_chars_digital: int64
qsc_code_frac_chars_whitespace: int64
qsc_code_size_file_byte: int64
qsc_code_num_lines: int64
qsc_code_num_chars_line_max: int64
qsc_code_num_chars_line_mean: int64
qsc_code_frac_chars_alphabet: int64
qsc_code_frac_chars_comments: int64
qsc_code_cate_xml_start: int64
qsc_code_frac_lines_dupe_lines: int64
qsc_code_cate_autogen: int64
qsc_code_frac_lines_long_string: int64
qsc_code_frac_chars_string_length: int64
qsc_code_frac_chars_long_word_length: int64
qsc_code_frac_lines_string_concat: null
qsc_code_cate_encoded_data: int64
qsc_code_frac_chars_hex_words: int64
qsc_code_frac_lines_prompt_comments: int64
qsc_code_frac_lines_assert: int64
qsc_codepython_cate_ast: int64
qsc_codepython_frac_lines_func_ratio: int64
qsc_codepython_cate_var_zero: int64
qsc_codepython_frac_lines_pass: int64
qsc_codepython_frac_lines_import: int64
qsc_codepython_frac_lines_simplefunc: int64
qsc_codepython_score_lines_no_logic: int64
qsc_codepython_frac_lines_print: int64
effective: string
hits: int64
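
Each record below repeats these 113 fields, one value per line, in the order given above. As a reading aid, here is a minimal sketch of inspecting a shard with this schema; it assumes the rows ship as Parquet under a hypothetical filename data.parquet (neither the storage format nor the file name is stated in this dump):

    # Print every column name with its Arrow dtype.
    # "data.parquet" is an assumed shard name, not taken from this dump.
    import pyarrow.parquet as pq

    schema = pq.read_schema("data.parquet")
    for field in schema:
        print(f"{field.name}: {field.type}")
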
2ae5dfb215b19942bd9a30e35a3057db94d2a78f
299
py
Python
Python-codes-CeV/51-Arithmetic_P.py
engcristian/Python
726a53e9499fd5d0594572298e59e318f98e2d36
[ "MIT" ]
1
2021-02-22T03:53:23.000Z
2021-02-22T03:53:23.000Z
Python-codes-CeV/51-Arithmetic_P.py
engcristian/Python
726a53e9499fd5d0594572298e59e318f98e2d36
[ "MIT" ]
null
null
null
Python-codes-CeV/51-Arithmetic_P.py
engcristian/Python
726a53e9499fd5d0594572298e59e318f98e2d36
[ "MIT" ]
null
null
null
''' Arithmetic progression with 10 elements. ''' first_term = int(input('Type the first term of this A.P: ')) reason = int(input('Type the reason of this A.P: ')) last_term = first_term + (50-1)*reason # A.P formula for c in range (first_term, last_term + reason , reason): print(c, end=' ► ')
29.9
60
0.668896
51
299
3.843137
0.529412
0.183673
0.122449
0.153061
0
0
0
0
0
0
0
0.020325
0.177258
299
9
61
33.222222
0.772358
0.177258
0
0
0
0
0.273109
0
0
0
0
0
0
1
0
false
0
0
0
0
0.2
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
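
The record above (hexsha 2ae5dfb2…) shows the layout in practice: 113 values in schema order, with null marking absent statistics (here the issue- and fork-event fields). A minimal sketch of rebuilding such a flattened row into a keyed record, assuming the field names and one row's values have already been loaded as Python lists:

    # FIELD_NAMES: the 113 schema names above, in order (assumed preloaded).
    # row: one record's values in the same order.
    def row_to_record(field_names, row):
        if len(field_names) != len(row):
            raise ValueError("row/schema length mismatch")
        return dict(zip(field_names, row))

    # e.g. record = row_to_record(FIELD_NAMES, row)
    #      record["max_stars_count"]  -> 1 for the record above
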
2aecb72c40fb43c9fc2cbc47d687dce7b40d1d6a
1,363
py
Python
ferry/nlp/fetch/fetch_evals.py
coursetable/ferry
f369b9588557c359af8589f2575a03493d6b08b6
[ "MIT" ]
4
2020-11-12T19:37:06.000Z
2021-12-14T01:38:39.000Z
ferry/nlp/fetch/fetch_evals.py
coursetable/ferry
f369b9588557c359af8589f2575a03493d6b08b6
[ "MIT" ]
96
2020-09-08T05:17:17.000Z
2022-03-31T23:12:51.000Z
ferry/nlp/fetch/fetch_evals.py
coursetable/ferry
f369b9588557c359af8589f2575a03493d6b08b6
[ "MIT" ]
2
2021-03-03T23:02:40.000Z
2021-06-17T23:33:05.000Z
# pylint: skip-file import json import requests url = "http://localhost:8085/v1/graphql" def fetch_evals(term, dump=True): query = ( """ query MyQuery { courses(where: { season: {season_code: {_eq: \"""" + str(term) + """\"}}, school: {_eq: "YC"}, average_rating: {_is_null: false}, average_workload: {_is_null: false}, extra_info: {_neq: "CANCELLED"} }) { course_id title season_code course_professors {professor {name}} listings {subject number section} average_rating average_workload description evaluation_narratives {question_code comment} evaluation_statistics {enrollment} skills areas } } """ ) r = requests.post(url, json={"query": query}, verify=False) data = json.loads(r.text)["data"]["courses"] if dump: with open("./../data/evals/" + str(term) + ".txt", "w+") as outfile: json.dump(data, outfile) return data for year in range(2009, 2021): print(year) for term in range(1, 4): fetch_evals(str(year) + "0" + str(term), True)
26.211538
76
0.487894
131
1,363
4.916031
0.618321
0.032609
0.034161
0
0
0
0
0
0
0
0
0.019277
0.391049
1,363
51
77
26.72549
0.756627
0.012472
0
0
0
0
0.600484
0.033898
0
0
0
0
0
1
0.025641
false
0
0.051282
0
0.102564
0.025641
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
2af450314f09ee3a74127b6e73b3040d23c048eb
2,179
py
Python
telemetry_client_bgp_sessions.py
akshshar/bigmuddy-network-telemetry-proto
26ea64cf9910e41c62270fea3b0aa318dd1a51db
[ "Apache-2.0" ]
null
null
null
telemetry_client_bgp_sessions.py
akshshar/bigmuddy-network-telemetry-proto
26ea64cf9910e41c62270fea3b0aa318dd1a51db
[ "Apache-2.0" ]
null
null
null
telemetry_client_bgp_sessions.py
akshshar/bigmuddy-network-telemetry-proto
26ea64cf9910e41c62270fea3b0aa318dd1a51db
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python # Standard python libs import os,sys sys.path.append("./src/genpy") import ast, pprint import pdb import yaml, json import telemetry_pb2 from mdt_grpc_dialin import mdt_grpc_dialin_pb2 from mdt_grpc_dialin import mdt_grpc_dialin_pb2_grpc import json_format import grpc # # Get the GRPC Server IP address and port number # def get_server_ip_port(): # Get GRPC Server's IP from the environment if 'SERVER_IP' not in os.environ.keys(): print "Need to set the SERVER_IP env variable e.g." print "export SERVER_IP='10.30.110.214'" os._exit(0) # Get GRPC Server's Port from the environment if 'SERVER_PORT' not in os.environ.keys(): print "Need to set the SERVER_PORT env variable e.g." print "export SERVER_PORT='57777'" os._exit(0) return (os.environ['SERVER_IP'], int(os.environ['SERVER_PORT'])) # # Setup the GRPC channel with the server, and issue RPCs # if __name__ == '__main__': server_ip, server_port = get_server_ip_port() print "Using GRPC Server IP(%s) Port(%s)" %(server_ip, server_port) # Create the channel for gRPC. channel = grpc.insecure_channel(str(server_ip)+":"+str(server_port)) unmarshal = True # Ereate the gRPC stub. stub = mdt_grpc_dialin_pb2_grpc.gRPCConfigOperStub(channel) metadata = [('username', 'vagrant'), ('password', 'vagrant')] Timeout = 3600*24*365 # Seconds sub_args = mdt_grpc_dialin_pb2.CreateSubsArgs(ReqId=99, encode=3, subidstr='BGP-SESSION') stream = stub.CreateSubs(sub_args, timeout=Timeout, metadata=metadata) for segment in stream: if not unmarshal: print segment else: # Go straight for telemetry data telemetry_pb = telemetry_pb2.Telemetry() encoding_path = 'Cisco-IOS-XR-ipv4-bgp-oper:bgp/instances/'+\ 'instance/instance-active/default-vrf/sessions/session' # Return in JSON format instead of protobuf. if json.loads(segment.data)["encoding_path"] == encoding_path: print json.dumps(json.loads(segment.data), indent=3) os._exit(0)
30.263889
93
0.671409
307
2,179
4.57329
0.390879
0.062678
0.055556
0.045584
0.212963
0.158832
0.158832
0.116097
0.116097
0.116097
0
0.022485
0.224415
2,179
71
94
30.690141
0.808284
0.166131
0
0.075
0
0
0.214761
0.066038
0
0
0
0
0
0
null
null
0.025
0.225
null
null
0.2
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
2af5ae6a3bb26a288651a381ee3ed2c4e61944eb
436
py
Python
Python3-functions/define_custom_exception_class.py
ipetel/code-snippets
e05bb8ef1f5d213aadba501d80b310507a1af117
[ "MIT" ]
1
2020-08-07T14:57:28.000Z
2020-08-07T14:57:28.000Z
Python3-functions/define_custom_exception_class.py
ipetel/code-snippets
e05bb8ef1f5d213aadba501d80b310507a1af117
[ "MIT" ]
null
null
null
Python3-functions/define_custom_exception_class.py
ipetel/code-snippets
e05bb8ef1f5d213aadba501d80b310507a1af117
[ "MIT" ]
1
2020-12-12T08:29:56.000Z
2020-12-12T08:29:56.000Z
''' This code is a simple example how to define custom exception class in Python ''' # custom exception class class CustomError(Exception): def __init__(self,message): self.message = message super().__init__(self.message) # use it whenever you need in your code as follows: try: ... <some code> ... except Exception as e: print(f'### [ERROR] - {e}') raise CustomError('some error message')
21.8
76
0.651376
57
436
4.842105
0.631579
0.119565
0.144928
0
0
0
0
0
0
0
0
0
0.233945
436
19
77
22.947368
0.826347
0.165138
0
0.181818
0
0
0.126354
0
0
0
0
0
0
0
null
null
0
0
null
null
0.090909
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
630b1968948a312327fd4f74f6cc73dcebabe1eb
10,346
py
Python
podcats/__init__.py
moritzj29/podcats
fc2bf1656eba5e6cc5062fe2b55717ef9b24dd5b
[ "BSD-3-Clause" ]
null
null
null
podcats/__init__.py
moritzj29/podcats
fc2bf1656eba5e6cc5062fe2b55717ef9b24dd5b
[ "BSD-3-Clause" ]
null
null
null
podcats/__init__.py
moritzj29/podcats
fc2bf1656eba5e6cc5062fe2b55717ef9b24dd5b
[ "BSD-3-Clause" ]
null
null
null
""" Podcats is a podcast feed generator and a server. It generates RSS feeds for podcast episodes from local audio files and, optionally, exposes the feed and as well as the episode file via a built-in web server so that they can be imported into iTunes or another podcast client. """ import os import re import time import argparse import mimetypes from email.utils import formatdate from os import path from xml.sax.saxutils import escape, quoteattr try: from urllib.request import pathname2url except ImportError: # For python 2 # noinspection PyUnresolvedReferences from urllib import pathname2url import mutagen import humanize from mutagen.id3 import ID3 from flask import Flask, Response # noinspection PyPackageRequirements from jinja2 import Environment, FileSystemLoader from collections import defaultdict __version__ = '0.6.3' __licence__ = 'BSD' __author__ = 'Jakub Roztocil' __url__ = 'https://github.com/jakubroztocil/podcats' WEB_PATH = '/web' STATIC_PATH = '/static' TEMPLATES_ROOT = os.path.join(os.path.dirname(__file__), 'templates') BOOK_COVER_EXTENSIONS = ('.jpg', '.jpeg', '.png') jinja2_env = Environment(loader=FileSystemLoader(TEMPLATES_ROOT)) class Episode(object): """Podcast episode""" def __init__(self, filename, relative_dir, root_url, title_format='{filename}{title}'): self.filename = filename self.relative_dir = relative_dir self.root_url = root_url self.length = os.path.getsize(filename) self.tags = mutagen.File(self.filename, easy=True) self.title_format = title_format print(self.tags) try: self.id3 = ID3(self.filename) except Exception: self.id3 = None def __lt__(self, other): return self.date < other.date def __gt__(self, other): return self.date > other.date def __cmp__(self, other): a, b = self.date, other.date return (a > b) - (a < b) # Python3 cmp() equivalent def as_xml(self): """Return episode item XML""" template = jinja2_env.get_template('episode.xml') return template.render( title=escape(self.title), url=quoteattr(self.url), guid=escape(self.url), mimetype=self.mimetype, length=self.length, date=formatdate(self.date), image_url=self.image, description=self.description, ) def as_html(self): """Return episode item html""" filename = os.path.basename(self.filename) directory = os.path.split(os.path.dirname(self.filename))[-1] template = jinja2_env.get_template('episode.html') return template.render( title=escape(self.title), url=self.url, filename=filename, directory=directory, mimetype=self.mimetype, length=humanize.naturalsize(self.length), date=formatdate(self.date), image_url=self.image, description=self.description, ) def get_tag(self, name): """Return episode file tag info""" try: return self.tags[name][0] except (KeyError, IndexError): pass def _to_url(self, filepath): fn = os.path.basename(filepath) path = STATIC_PATH + '/' + self.relative_dir + '/' + fn path = re.sub(r'//', '/', path) url = self.root_url + pathname2url(path) return url @property def title(self): """Return episode title""" filename = os.path.splitext(os.path.basename(self.filename))[0] try: args = defaultdict(lambda: '<unset>', {'filename': filename}) for key, value in self.tags.items(): args[key.lower()] = value[0] # use first entry only text = self.title_format.format_map(args) except Exception: print('Failed setting title for file {}. 
Using filename as title.'.format(filename)) text = filename return text @property def url(self): """Return episode url""" return self._to_url(self.filename) @property def date(self): """Return episode date as unix timestamp""" dt = self.get_tag('date') if dt: formats = [ '%Y-%m-%d:%H:%M:%S', '%Y-%m-%d:%H:%M', '%Y-%m-%d:%H', '%Y-%m-%d', '%Y-%m', '%Y', ] for fmt in formats: try: dt = time.mktime(time.strptime(dt, fmt)) break except ValueError: pass else: dt = None if not dt: dt = os.path.getmtime(self.filename) return dt @property def mimetype(self): """Return file mimetype name""" if self.filename.endswith('m4b'): return 'audio/x-m4b' else: return mimetypes.guess_type(self.filename)[0] @property def image(self): """Return an eventual cover image""" directory = os.path.split(self.filename)[0] image_files = [] for fn in os.listdir(directory): ext = os.path.splitext(fn)[1] if ext.lower() in BOOK_COVER_EXTENSIONS: image_files.append(fn) if len(image_files) > 0: abs_path_image = image_files[0] return self._to_url(abs_path_image) else: return None @property def description(self): """Return description""" try: return self.tags['description'][0] except Exception: return '' class Channel(object): """Podcast channel""" def __init__(self, root_dir, root_url, host, port, title, link, debug=False, video=False, title_format='{filename}{title}'): self.root_dir = root_dir or os.getcwd() self.root_url = root_url self.host = host self.port = int(port) self.link = link or self.root_url self.title = title or os.path.basename( os.path.abspath(self.root_dir.rstrip('/'))) self.description = 'Feed generated by <a href="%s">Podcats</a>.' % __url__ self.debug = debug self.video = video self.title_format = title_format def __iter__(self): for root, _, files in os.walk(self.root_dir): relative_dir = root[len(self.root_dir):] for fn in files: filepath = os.path.join(root, fn) mimetype = mimetypes.guess_type(filepath)[0] if (mimetype and 'audio' in mimetype or filepath.endswith('m4b') or (mimetype and 'video' in mimetype and self.video is True) ): yield Episode(filepath, relative_dir, self.root_url, title_format=self.title_format) def as_xml(self): """Return channel XML with all episode items""" template = jinja2_env.get_template('feed.xml') return template.render( title=escape(self.title), description=escape(self.description), link=escape(self.link), items=u''.join(episode.as_xml() for episode in sorted(self)) ).strip() def as_html(self): """Return channel HTML with all episode items""" template = jinja2_env.get_template('feed.html') return template.render( title=escape(self.title), description=self.description, link=escape(self.link), items=u''.join(episode.as_html() for episode in sorted(self)), ).strip() def serve(channel): """Serve podcast channel and episodes over HTTP""" server = Flask( __name__, static_folder=channel.root_dir, static_url_path=STATIC_PATH, ) server.route('/')( lambda: Response( channel.as_xml(), content_type='application/xml; charset=utf-8') ) server.add_url_rule( WEB_PATH, view_func=channel.as_html, methods=['GET'], ) server.run(host=channel.host, port=channel.port, debug=channel.debug, threaded=True) def main(): """Main function""" args = parser.parse_args() url = 'http://' + args.host + ':' + args.port channel = Channel( root_dir=path.abspath(args.directory), root_url=url, host=args.host, port=args.port, title=args.title, link=args.link, debug=args.debug, video=args.video, title_format=args.title_format ) if args.action == 'generate': print(channel.as_xml()) elif args.action == 
'generate_html': print(channel.as_html()) else: print('Welcome to the Podcats web server!') print('\nYour podcast feed is available at:\n') print('\t' + channel.root_url + '\n') print('The web interface is available at\n') print('\t{url}{web_path}\n'.format(url=url, web_path=WEB_PATH)) serve(channel) parser = argparse.ArgumentParser( description='Podcats: podcast feed generator and server <%s>.' % __url__ ) parser.add_argument( '--host', default='localhost', help='listen hostname or IP address' ) parser.add_argument( '--port', default='5000', help='listen tcp port number' ) parser.add_argument( 'action', metavar='COMMAND', choices=['generate', 'generate_html', 'serve'], help='`generate` the RSS feed to the terminal, or' '`serve` the generated RSS as well as audio files' ' via the built-in web server' ) parser.add_argument( 'directory', metavar='DIRECTORY', help='path to a directory with episode audio files', ) parser.add_argument( '--debug', action="store_true", help='Serve with debug mode on' ) parser.add_argument('--title', help='optional feed title') parser.add_argument('--link', help='optional feed link') parser.add_argument( '--video', action="store_true", help='include video files as well' ) parser.add_argument( '--title-format', dest='title_format', default='{filename}{title}', help='title format string and arguments' ) if __name__ == '__main__': main()
29.988406
128
0.593853
1,237
10,346
4.823767
0.226354
0.015083
0.025641
0.013407
0.187531
0.142115
0.116306
0.106251
0.060667
0.060667
0
0.005153
0.287164
10,346
344
129
30.075581
0.803932
0.078871
0
0.213768
1
0
0.121373
0.00233
0
0
0
0
0
1
0.072464
false
0.007246
0.061594
0.007246
0.206522
0.032609
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
630d031d30f4b3afe57f014af81f537273595260
2,861
py
Python
cytoskeleton_analyser/database/sqlite_alchemy_orm/containers/cell_elements.py
vsukhor/cytoskeleton-analyser
681a1f6ba1381a5fb293f2310fce5e97d400cfcb
[ "BSD-3-Clause" ]
null
null
null
cytoskeleton_analyser/database/sqlite_alchemy_orm/containers/cell_elements.py
vsukhor/cytoskeleton-analyser
681a1f6ba1381a5fb293f2310fce5e97d400cfcb
[ "BSD-3-Clause" ]
null
null
null
cytoskeleton_analyser/database/sqlite_alchemy_orm/containers/cell_elements.py
vsukhor/cytoskeleton-analyser
681a1f6ba1381a5fb293f2310fce5e97d400cfcb
[ "BSD-3-Clause" ]
null
null
null
# Copyright (c) 2021 Valerii Sukhorukov. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # 3. Neither the name of the copyright holder nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER ''AS IS'' AND ANY # EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # ----------------------------------------------------------------------------- """Subclasses of container base class for tailored to cell components. """ from collections import namedtuple from . import container from .. import models class MembraneNucleus(container.Optional): """Container class for configuration of cell nuclear membrane. """ model = models.ConfigNucleus class MembranePlasma(container.Optional): """Container class for configuration of cell plasma membrane. """ model = models.ConfigPlasma class InSpace(container.Base): """Container class for configuration of unanchored microtubule MTOC. """ model = models.ConfigMtocInSpace class Golgi(container.Base): """Container class for configuration of Golgi-type MTOC. """ model = models.ConfigMtocGolgi class Centrosome(container.Base): """Container class for configuration of centrosome-type MTOC. """ model = models.ConfigMtocCentrosome class Nucleus(container.Base): """Container class for configuration of Nucleus-type MTOC. """ model = models.ConfigMtocNucleus #: Types of Microtubule Organizing Centers (MTOCs). Mtoc = namedtuple('Mtoc', 'InSpace Golgi Centrosome Nucleus') # Microtubule Organizing Centers (MTOCs). mtoc = Mtoc(InSpace, Golgi, Centrosome, Nucleus)
35.320988
79
0.739602
356
2,861
5.94382
0.455056
0.026465
0.048204
0.085066
0.28828
0.199433
0.199433
0.114367
0.064272
0.064272
0
0.002947
0.169871
2,861
80
80
35.7625
0.888
0.735058
0
0
0
0
0.051724
0
0
0
0
0
0
1
0
false
0
0.176471
0
0.882353
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
630e334606bd98d4e81def6c3b2ffa679f3a7e67
948
py
Python
snow/views.py
cdmaok/web-sentiment
15fbe33327f9272035393d8c9991c06933163cc2
[ "Apache-2.0" ]
null
null
null
snow/views.py
cdmaok/web-sentiment
15fbe33327f9272035393d8c9991c06933163cc2
[ "Apache-2.0" ]
null
null
null
snow/views.py
cdmaok/web-sentiment
15fbe33327f9272035393d8c9991c06933163cc2
[ "Apache-2.0" ]
null
null
null
from django.shortcuts import render from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt import json from snownlp import SnowNLP # Create your views here. @csrf_exempt def index(request): message = {} print request.method if request.method == 'GET': message = construct_message(request.GET,'text') elif request.method == 'POST': print '%r' %request message = construct_message(json.loads(request.body),'text') else: print 'invalid request' return HttpResponse(json.dumps(message)) def construct_message(parameter,key): message = {} if not parameter.has_key(key): message['Code'] = 400 message['Message'] = 'invalid request' else: text = parameter[key] if text.strip() == '': message['Code'] = 406 message['Message'] = 'empty text' else: s = SnowNLP(text) score = s.sentiments print text,score message['Code'] = 200 message['Message'] = score return message
23.7
62
0.709916
122
948
5.467213
0.393443
0.044978
0.068966
0
0
0
0
0
0
0
0
0.011349
0.163502
948
39
63
24.307692
0.82976
0.024262
0
0.147059
0
0
0.097614
0
0
0
0
0
0
0
null
null
0
0.147059
null
null
0.117647
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
630fea4816a8abd9eaf09ba98c364ef95304a04e
7,559
py
Python
branches/g3d-8.0-64ffmpeg-win/bin/ice/doxygen.py
brown-ccv/VRG3D
0854348453ac150b27a8ae89024ef57360f15d45
[ "BSD-3-Clause" ]
null
null
null
branches/g3d-8.0-64ffmpeg-win/bin/ice/doxygen.py
brown-ccv/VRG3D
0854348453ac150b27a8ae89024ef57360f15d45
[ "BSD-3-Clause" ]
null
null
null
branches/g3d-8.0-64ffmpeg-win/bin/ice/doxygen.py
brown-ccv/VRG3D
0854348453ac150b27a8ae89024ef57360f15d45
[ "BSD-3-Clause" ]
null
null
null
# doxygen.py # # Doxygen Management from utils import * import glob ############################################################################## # Doxygen Management # ############################################################################## """ Called from buildDocumentation. """ def createDoxyfile(state): # Create the template, surpressing Doxygen's usual output shell("doxygen -g Doxyfile > /dev/null") # Edit it f = open('Doxyfile', 'r+') text = f.read() # TODO: excludes propertyMapping = { 'PROJECT_NAME' : '"' + state.projectName.capitalize() + '"', 'OUTPUT_DIRECTORY' : '"' + pathConcat(state.buildDir, 'doc') + '"', 'EXTRACT_ALL' : "YES", 'STRIP_FROM_PATH' : '"' + state.rootDir + '"', 'TAB_SIZE' : "4", 'QUIET' : 'YES', 'WARN_IF_UNDOCUMENTED' : 'NO', 'WARN_NO_PARAMDOC' : 'NO', 'HTML_OUTPUT' : '"./"', 'GENERATE_LATEX' : 'NO', 'RECURSIVE' : 'YES', 'SORT_BRIEF_DOCS' : 'YES', 'MACRO_EXPANSION' : 'YES', 'JAVADOC_AUTOBRIEF' : 'YES', 'EXCLUDE' : 'build graveyard temp doc-files data-files', "ALIASES" : ('"cite=\par Referenced Code:\\n " ' + '"created=\par Created:\\n" ' + '"edited=\par Last modified:\\n" ' + '"maintainer=\par Maintainer:\\n" ' + '"units=\par Units:\\n"') } # Rewrite the text by replacing any of the above properties newText = "" for line in string.split(text,"\n"): newText += (doxyLineRewriter(line, propertyMapping) + "\n") # Write the file back out f.seek(0) f.write(newText) f.close() ######################################################################### """ Called from createDoxyfile. """ def doxyLineRewriter(lineStr, hash): line = string.strip(lineStr) # remove leading and trailing whitespace if (line == ''): # it's a blank line return lineStr elif (line[0] == '#'): # it's a comment line return lineStr else : # here we know it's a property assignment prop = string.strip(line[0:string.find(line, "=")]) if hash.has_key(prop): print prop + ' = ' + hash[prop] return prop + ' = ' + hash[prop] else: return lineStr class DoxygenRefLinkRemapper: # Given the output of a Doxygen directory, rewrites the output .html # files so that G3D::ReferenceCountedPointer<X> instances link to X # instead of ReferenceCountedPointer. # # The current implementation only works for the G3D build itself. # It is intended to be expanded in the future to support projects # built against G3D. def remap(self, sourcePath, remapPath): self.__buildValidRefs(sourcePath) self.__remapRefLinks(remapPath) def __buildValidRefs(self, sourcePath): # initialize the ref name mapping self.validRefs = {} # build list of valid source files sources = os.listdir(sourcePath) # discard non-class/struct documentation files sources = filter(lambda filename: re.search('^class|^struct', filename), sources) sources = filter(lambda filename: not re.search('-members.html$', filename), sources) # discard filenames with encoded spaces (implies templates) for now sources = filter(lambda filename: not re.search('_01', filename), sources) # build the dictionary mapping valid ref names to their documentation for filename in sources: memberRefName, nonmemberRefName = self.__buildRefNames(filename) self.validRefs.update({memberRefName:filename, nonmemberRefName:filename}) def __buildRefNames(self, filename): # build list of qualified scopes from filename capitalizedScopes = self.__buildScopes(filename) # build the qualified name used as prefix qualifiedPrefix = '' for scope in capitalizedScopes: qualifiedPrefix += scope + '::' # build the member typedef ref name (e.g., G3D::Class::Ref) memberRefName = qualifiedPrefix + 'Ref' # build the non-member ref name (e.g., G3D::ClassRef) nonmemberRefName = qualifiedPrefix[:-2] + 'Ref' return memberRefName, nonmemberRefName def __buildScopes(self, filename): # remove the file type ('class', 'struct', '.html') and separate the scopes ('::') sansType = re.split('class|struct', filename)[1] sansType = re.split('.html', sansType)[0] rawScopes = re.split('_1_1', sansType) # re-capitalize letters capitalizedScopes = [] for scope in rawScopes: scope = re.sub('_(?<=_)\w', lambda match: match.group(0)[1].upper(), scope) capitalizedScopes.append(scope) return capitalizedScopes def __remapRefLinks(self, remapPath): # initialize the current remapping filename self.currentRemapFilename = '' # loop through all valid html/documentation files in the remap path for filename in glob.glob(os.path.join(os.path.normcase(remapPath), '*.html')): self.currentRemapFilename = filename # will hold updated file contents remappedBuffer = '' # read each line in file and replace any matched ref links f = open(filename) try: for line in f: remappedBuffer += re.sub('(href="class_g3_d_1_1_reference_counted_pointer.html">)([a-zA-Z0-9:]+)(</a>)', self.__linkMatchCallack, line) finally: f.close() #assume lines were read and remapped correctly, write new documentation writeFile(filename, remappedBuffer) def __linkMatchCallack(self, match): # if ref search fails, build the fully qualified ref name that we can search the dictionary for # e.g., SuperShader::Pass::Ref would be qualified as G3D::SuperShader::Pass::Ref # ref links found in non-struct/class files will be matched as-is # note: this would have to be redone if multiple source directories is implemented if match.group(2) in self.validRefs: return 'href="' + self.validRefs[match.group(2)] + '">' + match.group(2) + match.group(3) elif re.search('class|struct', self.currentRemapFilename): # get list of scopes from current filename qualifiedScopes = self.__buildScopes(self.currentRemapFilename) # build a prefix including all of the scopes except for current one (should be class/struct) for numScopes in range(0, len(qualifiedScopes)): qualifiedPrefix = '' for scope in qualifiedScopes[:-numScopes]: qualifiedPrefix += scope + '::' qualifiedRef = qualifiedPrefix + match.group(2) if qualifiedRef in self.validRefs: return 'href="' + self.validRefs[qualifiedRef] + '">' + match.group(2) + match.group(3) return match.group(0)
40.207447
155
0.557878
756
7,559
5.510582
0.375661
0.021603
0.013202
0.019443
0.055449
0.047048
0.036486
0
0
0
0
0.006321
0.3093
7,559
187
156
40.42246
0.791611
0.259029
0
0.107843
0
0
0.125262
0.014468
0
0
0
0.005348
0
0
null
null
0
0.019608
null
null
0.009804
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
6310fb525d8e21534dcc57582305f77d4f195a47
1,806
py
Python
test_ocr.py
lanstonpeng/NightOwlServer
1810c631c44d53f885e79164c48a7cca61441cce
[ "MIT" ]
null
null
null
test_ocr.py
lanstonpeng/NightOwlServer
1810c631c44d53f885e79164c48a7cca61441cce
[ "MIT" ]
null
null
null
test_ocr.py
lanstonpeng/NightOwlServer
1810c631c44d53f885e79164c48a7cca61441cce
[ "MIT" ]
null
null
null
#!/usr/bin/env python # encoding: utf-8 import urllib, urllib2 import tempfile import base64 from PIL import Image import os # 全局变量 API_URL = 'http://apis.baidu.com/apistore/idlocr/ocr' API_KEY = "0c69d1b8ec1c96561cb9ca3c037d7225" def get_image_text(img_url=None): headers = {} # download image opener = urllib2.build_opener() opener.addheaders = [('User-agent', 'Mozilla/5.0')] #img_request = urllib2.Request(img_url, headers=headers) #img_data = urllib2.urlopen(img_request).read() response = opener.open(img_url) img_data = response.read() # save image to some place origin_img = tempfile.NamedTemporaryFile(delete=False) save_img = tempfile.NamedTemporaryFile(delete=False) origin_img.write(img_data) origin_img.flush() # convert image im = Image.open(origin_img.name) im.convert('RGB').save(save_img.name, "JPEG") with open(save_img.name, "rb") as image_file: encoded_image = base64.b64encode(image_file.read()) data = {} data['fromdevice'] = "pc" data['clientip'] = "10.10.10.0" data['detecttype'] = "LocateRecognize" data['languagetype'] = "CHN_ENG" data['imagetype'] = "1" data['image'] = encoded_image decoded_data = urllib.urlencode(data) req = urllib2.Request(API_URL, data = decoded_data) req.add_header("Content-Type", "application/x-www-form-urlencoded") req.add_header("apikey", API_KEY) resp = urllib2.urlopen(req) content = resp.read() # remove useless files os.unlink(origin_img.name) os.unlink(save_img.name) if(content): return content return None if __name__ == "__main__": print get_image_text("http://www.liantu.com/tiaoma/eantitle.php?title=enl2dnhtUHNPMzQ0TUFpRk5sOTZseEZpYk1PeFYwWlBFQlc2a1dtZjcwaz0=")
26.173913
136
0.692137
230
1,806
5.247826
0.473913
0.037283
0.027341
0.057995
0.06628
0
0
0
0
0
0
0.033625
0.176633
1,806
68
137
26.558824
0.778077
0.120155
0
0
0
0
0.227215
0.041139
0
0
0
0
0
0
null
null
0
0.121951
null
null
0.02439
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
63181b955335972638426181671ca5d3dffa487d
385
py
Python
pyexcel_matplotlib/__init__.py
pyexcel/pyexcel-matplotlib
8771fcf3cc82164b50dc7ec0314838bf3de63e3b
[ "BSD-3-Clause" ]
null
null
null
pyexcel_matplotlib/__init__.py
pyexcel/pyexcel-matplotlib
8771fcf3cc82164b50dc7ec0314838bf3de63e3b
[ "BSD-3-Clause" ]
null
null
null
pyexcel_matplotlib/__init__.py
pyexcel/pyexcel-matplotlib
8771fcf3cc82164b50dc7ec0314838bf3de63e3b
[ "BSD-3-Clause" ]
null
null
null
""" pyexcel_matplotlib ~~~~~~~~~~~~~~~~~~~ chart drawing plugin for pyexcel :copyright: (c) 2016-2017 by Onni Software Ltd. :license: New BSD License, see LICENSE for further details """ from pyexcel.plugins import PyexcelPluginChain PyexcelPluginChain(__name__).add_a_renderer( relative_plugin_class_path='plot.MatPlotter', file_types=['svg', 'png'] )
22.647059
62
0.690909
44
385
5.795455
0.840909
0
0
0
0
0
0
0
0
0
0
0.025078
0.171429
385
16
63
24.0625
0.774295
0.467532
0
0
0
0
0.118644
0
0
0
0
0
0
1
0
true
0
0.2
0
0.2
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
1
63194ee4c304415890bdeb1468d2982220dc84d9
5,749
py
Python
settings/base.py
ankit-ak/django-ecommerce-1
248127526c03c7c0f25a2df84365a0d0199b9693
[ "MIT" ]
4
2021-04-06T16:50:57.000Z
2022-03-02T00:50:44.000Z
settings/base.py
ankit-ak/django-ecommerce-1
248127526c03c7c0f25a2df84365a0d0199b9693
[ "MIT" ]
null
null
null
settings/base.py
ankit-ak/django-ecommerce-1
248127526c03c7c0f25a2df84365a0d0199b9693
[ "MIT" ]
7
2021-02-22T08:07:20.000Z
2022-03-06T10:17:28.000Z
""" Django settings for petstore project. Generated by 'django-admin startproject' using Django 2.2.10. For more information on this file, see https://docs.djangoproject.com/en/2.2/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/2.2/ref/settings/ """ import os from django.contrib.messages import constants as messages # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/ # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'django.contrib.sites', 'django.contrib.humanize', # third party 'crispy_forms', 'allauth', 'allauth.account', 'storages', # local 'accounts.apps.AccountsConfig', 'pages.apps.PagesConfig', 'products.apps.ProductsConfig', 'basket.apps.BasketConfig', 'checkout.apps.CheckoutConfig', 'orders.apps.OrdersConfig', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', # 'whitenoise.middleware.WhiteNoiseMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', # user middleware 'basket.middleware.BasketMiddleware', ] ROOT_URLCONF = 'petstore.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(BASE_DIR, 'templates')], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'petstore.wsgi.application' # Database # https://docs.djangoproject.com/en/2.2/ref/settings/#databases # Password validation # https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/2.2/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # User defined settings AUTH_USER_MODEL = 'accounts.CustomUser' EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' STATICFILES_FINDERS = [ 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', ] # AWS Setup for media store on Heroku USE_S3 = os.getenv('USE_S3') == 'TRUE' if USE_S3: # aws settings AWS_ACCESS_KEY_ID = os.getenv('AWS_ACCESS_KEY_ID') AWS_SECRET_ACCESS_KEY = os.getenv('AWS_SECRET_ACCESS_KEY') AWS_STORAGE_BUCKET_NAME = os.getenv('AWS_STORAGE_BUCKET_NAME') AWS_DEFAULT_ACL = None AWS_S3_CUSTOM_DOMAIN = f'{AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com' AWS_S3_OBJECT_PARAMETERS = {'CacheControl': 'max-age=86400'} # s3 static settings STATIC_LOCATION 
= 'static' STATIC_URL = f'https://{AWS_S3_CUSTOM_DOMAIN}/{STATIC_LOCATION}/' STATICFILES_STORAGE = 'petstore.storage_backends.StaticStorage' # s3 public media settings PUBLIC_MEDIA_LOCATION = 'media' MEDIA_URL = f'https://{AWS_S3_CUSTOM_DOMAIN}/{PUBLIC_MEDIA_LOCATION}/' DEFAULT_FILE_STORAGE = 'petstore.storage_backends.PublicMediaStorage' else: STATIC_URL = '/staticfiles/' STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles') MEDIA_URL = '/mediafiles/' MEDIA_ROOT = os.path.join(BASE_DIR, 'mediafiles') STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'),) CRISPY_TEMPLATE_PACK = 'bootstrap4' # Bootstrap class mappings for django messages MESSAGE_TAGS = { messages.DEBUG: 'alert-info', messages.INFO: 'alert-info', messages.SUCCESS: 'alert-success', messages.WARNING: 'alert-warning', messages.ERROR: 'alert-danger', } # django-allauth config SITE_ID = 1 AUTHENTICATION_BACKENDS = ( 'django.contrib.auth.backends.ModelBackend', 'allauth.account.auth_backends.AuthenticationBackend', ) LOGIN_REDIRECT_URL = 'home' ACCOUNT_LOGOUT_REDIRECT = 'home' ACCOUNT_SESSION_REMEMBER = True ACCOUNT_EMAIL_REQUIRED = True ACCOUNT_PRESERVE_USERNAME_CASING = False # custom forms to override allauth defaults ACCOUNT_FORMS = { 'signup': 'accounts.forms.CustomSignupForm', 'login': 'accounts.forms.CustomLoginForm', } ACCOUNT_USERNAME_REQUIRED = False ACCOUNT_AUTHENTICATION_METHOD = 'email' ACCOUNT_UNIQUE_EMAIL = True ACCOUNT_USER_MODEL_USERNAME_FIELD = None # stripe keys STRIPE_TEST_PUBLISHABLE_KEY = os.environ.get('STRIPE_TEST_PUBLISHABLE_KEY') STRIPE_TEST_SECRET_KEY = os.environ.get('STRIPE_TEST_SECRET_KEY') # whitenoise setting # STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
28.043902
91
0.728996
646
5,749
6.283282
0.362229
0.067258
0.033506
0.036955
0.152501
0.140429
0.068983
0.056171
0.029564
0
0
0.007993
0.151331
5,749
204
92
28.181373
0.823939
0.204905
0
0
1
0
0.485563
0.37073
0
0
0
0
0
1
0
false
0.04065
0.01626
0
0.01626
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
6322146c8d971464c6f726ebdba3a3d7a2540028
6,518
py
Python
mastiff/plugins/analysis/EXE/EXE-singlestring.py
tt1379/mastiff
04d569e4fa59513572e77c74b049cad82f9b0310
[ "Apache-2.0" ]
164
2015-02-09T18:19:26.000Z
2022-02-23T09:49:18.000Z
mastiff/plugins/analysis/EXE/EXE-singlestring.py
ashishhmittal/mastiff
04d569e4fa59513572e77c74b049cad82f9b0310
[ "Apache-2.0" ]
1
2016-05-20T16:21:33.000Z
2016-05-20T16:21:33.000Z
mastiff/plugins/analysis/EXE/EXE-singlestring.py
ashishhmittal/mastiff
04d569e4fa59513572e77c74b049cad82f9b0310
[ "Apache-2.0" ]
43
2015-03-03T11:15:58.000Z
2021-10-02T02:14:57.000Z
#!/usr/bin/env python """ Copyright 2012-2013 The MASTIFF Project, All Rights Reserved. This software, having been partly or wholly developed and/or sponsored by KoreLogic, Inc., is hereby released under the terms and conditions set forth in the project's "README.LICENSE" file. For a list of all contributors and sponsors, please refer to the project's "README.CREDITS" file. """ __doc__ = """ Single-byte string plug-in Plugin Type: EXE Purpose: Attackers have begun to obfuscate embedded strings by moving a single byte at a time into a character array. In assembler, it looks like: mov mem, 0x68 mov mem+4, 0x69 mov mem+8, 0x21 ... Using a strings program, these strings will not be found. This script looks for any strings embedded in this way and prints them out. It does this by looking through the file for C6 opcodes, which are the start of the "mov mem/reg, imm" instruction. It will then decode it, grab the value and create a string from it. Requirements: - distorm3 (http://code.google.com/p/distorm/) Output: None """ __version__ = "$Id: 6322146c8d971464c6f726ebdba3a3d7a2540028 $" import logging import re import os try: from distorm3 import Decode, Decode32Bits except ImportError, err: print "EXE-SingleString: Could not import distorm3: %s" % error import mastiff.plugins.category.exe as exe # Change the class name and the base class class SingleString(exe.EXECat): """Extract single-byte strings from an executable.""" def __init__(self): """Initialize the plugin.""" exe.EXECat.__init__(self) self.length = 3 self.raw = False def activate(self): """Activate the plugin.""" exe.EXECat.activate(self) def deactivate(self): """Deactivate the plugin.""" exe.EXECat.deactivate(self) def findMov(self, filename): """ look through the file for any c6 opcode (mov reg/mem, imm) when it finds one, decode it and put it into a dictionary """ #log = logging.getLogger('Mastiff.Plugins.' + self.name + '.findMov') f = open(filename,'rb') offset = 0 instructs = {} mybyte = f.read(1) while mybyte: if mybyte == "\xc6": # found a mov op - decode and record it f.seek(offset) mybyte = f.read(16) # p will come back as list of (offset, size, instruction, hexdump) p = Decode(offset, mybyte, Decode32Bits) # break up the mnemonic ma = re.match('(MOV) ([\S\s]+), ([x0-9a-fA-F]+)', p[0][2]) if ma is not None: instructs[offset] = [ma.group(1), ma.group(2), ma.group(3), p[0][1]] # mnemonic, size #log.debug( "MOV instructions detected: %x %s %d" % (offset,p[0][2],p[0][1]) ) f.seek(offset+1) mybyte = f.read(1) offset = offset + 1 f.close() return instructs def decodeBytes(self, instructs): """ Take in a dict of instructions - parse through each instruction and grab the strings """ #log = logging.getLogger('Mastiff.Plugins.' + self.name + '.decodeBytes') curString = "" curOffset = 0 strList = [] usedBytes = [] for off in sorted(instructs.keys()): if off not in usedBytes: # set up the new offset if needed if curOffset == 0: curOffset = off while off in instructs: usedBytes.append(off) hexVal = int(instructs[off][2], 16) opLen = instructs[off][3] # is hexVal out of range? if hexVal < 32 or hexVal > 126 and (hexVal != 10 or hexVal != 13 or hexVal != 9): # end of string #log.debug("%x non-string char - new string: %d: %s" % (curOffset, hexVal,curString)) strList.append([curOffset, curString]) curOffset = off + opLen curString = "" else: #add to string if not self.raw and hexVal == 10: # line feed curString = curString + "\\r" elif not self.raw and hexVal == 13: # return curString = curString + "\\n" elif not self.raw and hexVal == 9: # tab curString = curString + "\\t" else: curString = curString + chr(hexVal) off = off + opLen strList.append([curOffset, curString]) curOffset = 0 curString = "" usedBytes.append(off) return strList def analyze(self, config, filename): """Analyze the file.""" # sanity check to make sure we can run if self.is_activated == False: return False log = logging.getLogger('Mastiff.Plugins.' + self.name) log.info('Starting execution.') self.length = config.get_var(self.name, 'length') if self.length is None: self.length = 3 self.raw = config.get_bvar(self.name, 'raw') # find the bytes in the file instructs = self.findMov(filename) # now lets get the strings strlist = self.decodeBytes(instructs) self.output_file(config.get_var('Dir','log_dir'), strlist) return True def output_file(self, outdir, strlist): """Print output from analysis to a file.""" log = logging.getLogger('Mastiff.Plugins.' + self.name + '.output_file') # if the string is of the right len, print it outstr = "" for string in strlist: if len(string[1]) >= int(self.length): outstr = outstr + '0x%x: %s\n' % (string[0], string[1]) if len(outstr) > 0: try: outfile = open(outdir + os.sep + 'single-string.txt', 'w') except IOError, err: log.debug("Cannot open single-string.txt: %s" % err) return False outfile.write(outstr) outfile.close() else: log.debug('No single-byte strings found.') return True
31.640777
109
0.544799
775
6,518
4.550968
0.36
0.013609
0.021548
0.029487
0.097817
0.059541
0.046498
0
0
0
0
0.025184
0.35425
6,518
205
110
31.795122
0.812782
0.113378
0
0.186441
0
0
0.193308
0.008063
0
0
0.002419
0
0
0
null
null
0
0.059322
null
null
0.016949
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
6322cf5faf1311f6d38c18bbb9cc68ac52d6e045
2,874
py
Python
Libs/Scene Recognition/SceneRecognitionCNN.py
vpulab/Semantic-Guided-Scene-Attribution
1e247e48b549eb648d833050fb150f041948422d
[ "MIT" ]
3
2021-03-03T09:07:53.000Z
2021-07-19T10:44:32.000Z
Libs/Scene Recognition/SceneRecognitionCNN.py
vpulab/Semantic-Guided-Scene-Attribution
1e247e48b549eb648d833050fb150f041948422d
[ "MIT" ]
null
null
null
Libs/Scene Recognition/SceneRecognitionCNN.py
vpulab/Semantic-Guided-Scene-Attribution
1e247e48b549eb648d833050fb150f041948422d
[ "MIT" ]
1
2021-03-11T09:17:04.000Z
2021-03-11T09:17:04.000Z
import torch.nn as nn from torchvision.models import resnet class SceneRecognitionCNN(nn.Module): """ Generate Model Architecture """ def __init__(self, arch, scene_classes=1055): super(SceneRecognitionCNN, self).__init__() # --------------------------------# # Base Network # # ------------------------------- # if arch == 'ResNet-18': # ResNet-18 Network base = resnet.resnet18(pretrained=True) # Size parameters for ResNet-18 size_fc_RGB = 512 elif arch == 'ResNet-50': # ResNet-50 Network base = resnet.resnet50(pretrained=True) # Size parameters for ResNet-50 size_fc_RGB = 2048 # --------------------------------# # RGB Branch # # ------------------------------- # # First initial block self.in_block = nn.Sequential( nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False), nn.BatchNorm2d(64), nn.ReLU(inplace=True), nn.MaxPool2d(kernel_size=3, stride=2, padding=1, return_indices=True) ) # Encoder self.encoder1 = base.layer1 self.encoder2 = base.layer2 self.encoder3 = base.layer3 self.encoder4 = base.layer4 # -------------------------------------# # RGB Classifier # # ------------------------------------ # self.dropout = nn.Dropout(0.3) self.avgpool = nn.AvgPool2d(7, stride=1) self.fc = nn.Linear(size_fc_RGB, scene_classes) # Loss self.criterion = nn.CrossEntropyLoss() def forward(self, x): """ Netowrk forward :param x: RGB Image :return: Scene recognition predictions """ # --------------------------------# # RGB Branch # # ------------------------------- # x, pool_indices = self.in_block(x) e1 = self.encoder1(x) e2 = self.encoder2(e1) e3 = self.encoder3(e2) e4 = self.encoder4(e3) # -------------------------------------# # RGB Classifier # # ------------------------------------ # act = self.avgpool(e4) act = act.view(act.size(0), -1) act = self.dropout(act) act = self.fc(act) return act def loss(self, x, target): """ Funtion to comput the loss :param x: Predictions obtained by the network :param target: Ground-truth scene recognition labels :return: Loss value """ # Check inputs assert (x.shape[0] == target.shape[0]) # Classification loss loss = self.criterion(x, target.long()) return loss
30.903226
81
0.451983
268
2,874
4.764925
0.414179
0.018794
0.021143
0.043853
0.057948
0.057948
0
0
0
0
0
0.037037
0.34238
2,874
92
82
31.23913
0.638624
0.325679
0
0
1
0
0.010011
0
0
0
0
0
0.025
1
0.075
false
0
0.05
0
0.2
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
63243b0ed7269143b65e929fe80aa6f12a71bb2a
1,309
py
Python
raduga/aws/ec2.py
tuxpiper/raduga
e63bdd8f9d4154c0ac0a72a1182da5d137e38514
[ "MIT" ]
null
null
null
raduga/aws/ec2.py
tuxpiper/raduga
e63bdd8f9d4154c0ac0a72a1182da5d137e38514
[ "MIT" ]
null
null
null
raduga/aws/ec2.py
tuxpiper/raduga
e63bdd8f9d4154c0ac0a72a1182da5d137e38514
[ "MIT" ]
null
null
null
from time import sleep class AWSEC2(object): def __init__(self, target): self.conn = target.get_ec2_conn() def get_instance_state(self, instance_id): instance = self.conn.get_only_instances(instance_id)[0] return instance.state def stop_instance(self, instance_id): self.conn.stop_instances(instance_id) def create_ami(self, instance_id, name, description, tags): instance = self.conn.get_only_instances(instance_id)[0] if instance.state != 'stopped': raise RuntimeError("Won't create AMI from non-stopped instance") image_id = self.conn.create_image(instance_id, name, description) sleep(1) # Add tags to the image self.conn.create_tags(image_id, tags) return image_id def get_ami_state(self, image_id): ami = self.conn.get_all_images(image_id)[0] return ami.state def find_ami(self, **tags): filters = dict(map(lambda (k,v): ("tag:"+k,v), tags.items())) results = self.conn.get_all_images(owners=['self'], filters=filters) if len(results) == 0: return None elif len(results) == 1: return results[0].id else: raise RuntimeError("More than ona AMI is matching the requested tags (??!)")
35.378378
88
0.638655
178
1,309
4.494382
0.348315
0.08
0.055
0.0475
0.1575
0.1075
0.1075
0.1075
0.1075
0
0
0.009165
0.249809
1,309
36
89
36.361111
0.805499
0.016043
0
0.068966
0
0
0.086314
0
0
0
0
0
0
0
null
null
0
0.034483
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
6327c56d9c670d44daabc2e832ec49f09adf169e
433
py
Python
tests/ext/test_ext_plugin.py
tomekr/cement
fece8629c48bcd598fd61d8aa7457a5df4c4f831
[ "BSD-3-Clause" ]
826
2015-01-09T13:23:35.000Z
2022-03-18T01:19:40.000Z
tests/ext/test_ext_plugin.py
tomekr/cement
fece8629c48bcd598fd61d8aa7457a5df4c4f831
[ "BSD-3-Clause" ]
316
2015-01-14T10:35:22.000Z
2022-03-08T17:18:10.000Z
tests/ext/test_ext_plugin.py
tomekr/cement
fece8629c48bcd598fd61d8aa7457a5df4c4f831
[ "BSD-3-Clause" ]
112
2015-01-10T15:04:26.000Z
2022-03-16T08:11:58.000Z
from cement.ext.ext_plugin import CementPluginHandler # module tests class TestCementPluginHandler(object): def test_subclassing(self): class MyPluginHandler(CementPluginHandler): class Meta: label = 'my_plugin_handler' h = MyPluginHandler() assert h._meta.interface == 'plugin' assert h._meta.label == 'my_plugin_handler' # app functionality and coverage tests
22.789474
53
0.681293
44
433
6.522727
0.613636
0.062718
0.076655
0.118467
0.167247
0
0
0
0
0
0
0
0.244804
433
18
54
24.055556
0.877676
0.113164
0
0
0
0
0.105263
0
0
0
0
0
0.222222
1
0.111111
false
0
0.111111
0
0.555556
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
632d57e33c16941deaf1e4e00a7a754b488df9c5
702
py
Python
test/test_document.py
hibtc/madseq
1e218726a01a1817464f84f0ce887be3186f2c08
[ "MIT" ]
null
null
null
test/test_document.py
hibtc/madseq
1e218726a01a1817464f84f0ce887be3186f2c08
[ "MIT" ]
2
2015-05-25T00:42:49.000Z
2015-05-25T00:43:19.000Z
test/test_document.py
hibtc/madseq
1e218726a01a1817464f84f0ce887be3186f2c08
[ "MIT" ]
null
null
null
# test utilities import unittest from decimal import Decimal # tested module import madseq class Test_Document(unittest.TestCase): def test_parse_line(self): parse = madseq.Document.parse_line Element = madseq.Element self.assertEqual(list(parse(' \t ')), ['']) self.assertEqual(list(parse(' \t ! a comment; ! ')), ['! a comment; ! ']) self.assertEqual(list(parse(' use, z=23.23e2; k: z; !')), ['!', Element(None, 'use', {'z': Decimal('23.23e2')}), Element('k', 'z', {})]) if __name__ == '__main__': unittest.main()
22.645161
74
0.497151
69
702
4.884058
0.449275
0.133531
0.169139
0.21365
0.148368
0
0
0
0
0
0
0.021882
0.349003
702
30
75
23.4
0.715536
0.039886
0
0
0
0
0.125186
0
0
0
0
0
0.176471
1
0.058824
false
0
0.176471
0
0.294118
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
633116f53d75e450606cb590875b6e5bf2ea6638
4,917
py
Python
opensanctions/crawlers/eu_fsf.py
quantumchips/opensanctions
56f19dcfea704480e56a311d2a807c8446237457
[ "MIT" ]
102
2018-03-22T16:33:17.000Z
2021-01-20T07:39:43.000Z
opensanctions/crawlers/eu_fsf.py
quantumchips/opensanctions
56f19dcfea704480e56a311d2a807c8446237457
[ "MIT" ]
101
2021-02-12T18:26:16.000Z
2022-01-27T14:01:53.000Z
opensanctions/crawlers/eu_fsf.py
quantumchips/opensanctions
56f19dcfea704480e56a311d2a807c8446237457
[ "MIT" ]
50
2018-05-11T18:00:49.000Z
2021-01-26T12:11:20.000Z
from prefixdate import parse_parts from opensanctions import helpers as h from opensanctions.util import remove_namespace def parse_address(context, el): country = el.get("countryDescription") if country == "UNKNOWN": country = None # context.log.info("Addrr", el=el) return h.make_address( context, street=el.get("street"), po_box=el.get("poBox"), city=el.get("city"), place=el.get("place"), postal_code=el.get("zipCode"), region=el.get("region"), country=country, country_code=el.get("countryIso2Code"), ) def parse_entry(context, entry): subject_type = entry.find("./subjectType") schema = context.lookup_value("subject_type", subject_type.get("code")) if schema is None: context.log.warning("Unknown subject type", type=subject_type) return entity = context.make(schema) entity.id = context.make_slug(entry.get("euReferenceNumber")) entity.add("notes", entry.findtext("./remark")) entity.add("topics", "sanction") sanction = h.make_sanction(context, entity) regulation = entry.find("./regulation") source_url = regulation.findtext("./publicationUrl", "") sanction.set("sourceUrl", source_url) sanction.add("program", regulation.get("programme")) sanction.add("reason", regulation.get("numberTitle")) sanction.add("startDate", regulation.get("entryIntoForceDate")) sanction.add("listingDate", regulation.get("publicationDate")) for name in entry.findall("./nameAlias"): if entry.get("strong") == "false": entity.add("weakAlias", name.get("wholeName")) else: entity.add("name", name.get("wholeName")) entity.add("title", name.get("title"), quiet=True) entity.add("firstName", name.get("firstName"), quiet=True) entity.add("middleName", name.get("middleName"), quiet=True) entity.add("lastName", name.get("lastName"), quiet=True) entity.add("position", name.get("function"), quiet=True) gender = h.clean_gender(name.get("gender")) entity.add("gender", gender, quiet=True) for node in entry.findall("./identification"): type = node.get("identificationTypeCode") schema = "Passport" if type == "passport" else "Identification" passport = context.make(schema) passport.id = context.make_id("ID", entity.id, node.get("logicalId")) passport.add("holder", entity) passport.add("authority", node.get("issuedBy")) passport.add("type", node.get("identificationTypeDescription")) passport.add("number", node.get("number")) passport.add("number", node.get("latinNumber")) passport.add("startDate", node.get("issueDate")) passport.add("startDate", node.get("issueDate")) passport.add("country", node.get("countryIso2Code")) passport.add("country", node.get("countryDescription")) for remark in node.findall("./remark"): passport.add("summary", remark.text) context.emit(passport) for node in entry.findall("./address"): address = parse_address(context, node) h.apply_address(context, entity, address) for child in node.getchildren(): if child.tag in ("regulationSummary"): continue elif child.tag == "remark": entity.add("notes", child.text) elif child.tag == "contactInfo": prop = context.lookup_value("contact_info", child.get("key")) if prop is None: context.log.warning("Unknown contact info", node=child) else: entity.add(prop, child.get("value")) else: context.log.warning("Unknown address component", node=child) for birth in entry.findall("./birthdate"): partialBirth = parse_parts( birth.get("year"), birth.get("month"), birth.get("day") ) entity.add("birthDate", birth.get("birthdate")) entity.add("birthDate", partialBirth) address = parse_address(context, birth) if address is not None: entity.add("birthPlace", address.get("full")) entity.add("country", address.get("country")) for node in entry.findall("./citizenship"): entity.add("nationality", node.get("countryIso2Code"), quiet=True) entity.add("nationality", node.get("countryDescription"), quiet=True) context.emit(entity, target=True, unique=True) context.emit(sanction) def crawl(context): path = context.fetch_resource("source.xml", context.dataset.data.url) context.export_resource(path, "text/xml", title=context.SOURCE_TITLE) doc = context.parse_resource_xml(path) doc = remove_namespace(doc) for entry in doc.findall(".//sanctionEntity"): parse_entry(context, entry)
40.636364
77
0.630466
553
4,917
5.54792
0.256781
0.052803
0.022816
0.029335
0.113103
0.04661
0.027053
0.027053
0
0
0
0.00078
0.218019
4,917
120
78
40.975
0.797139
0.006508
0
0.048544
0
0
0.202744
0.010444
0
0
0
0
0
1
0.029126
false
0.135922
0.029126
0
0.07767
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
2d47ab385e2aad6d3b4b15b6488db399d09b39ca
3,784
py
Python
scripts/supervised/exam_real_robot_data/analysis_icm_model_real_bot.py
fredshentu/public_model_based_controller
9301699bc56aa49ba5c699f7d5be299046a8aa0c
[ "MIT" ]
null
null
null
scripts/supervised/exam_real_robot_data/analysis_icm_model_real_bot.py
fredshentu/public_model_based_controller
9301699bc56aa49ba5c699f7d5be299046a8aa0c
[ "MIT" ]
null
null
null
scripts/supervised/exam_real_robot_data/analysis_icm_model_real_bot.py
fredshentu/public_model_based_controller
9301699bc56aa49ba5c699f7d5be299046a8aa0c
[ "MIT" ]
null
null
null
""" Since the size of real robot data is huge, we first go though all data then save loss array, then sort loss array. Finally we use the indexes to find the corresponding graphs """ import time from rllab.core.serializable import Serializable from numpy.linalg import norm from numpy import mean from numpy import std import numpy as np import csv, os import scipy.misc as scm # import pickle import tensorflow as tf from rllab.policies.uniform_control_policy import UniformControlPolicy from rllab.sampler.utils import rollout from railrl.policies.cmaes_icm import CMAESPolicy import argparse import matplotlib.pyplot as plt import pickle OBS_INPUT_SHAPE = [128,128,6] ACTION_SHAPE = [4] STATE_SHAPE = [8] #return img, action, next_img, state, next_state def load_data(filename): obs = [] next_obs = [] action = [] file = open(filename,'rb') load_dict = pickle.load(file,encoding='latin1') states = load_dict["states"] images = load_dict["images"] actions = load_dict["action_list"] assert(len(images) == 601) assert(len(actions) == 600) assert(len(states) == 601) return images[:600], actions,images[1:], states[:600], states[1:] if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('file', type=str, help='path to the data file, should be pickle') with tf.Session() as sess: data = joblib.load(args.file) _conv_encoder = data["encoder"] _inverse_model = data["inverse_model"] _forward_model = data["forward_model"] _state_encoder = data["state_encoder"] s1_ph = tf.placeholder(tf.float32, [None] + OBS_INPUT_SHAPE)/255 - 0.5 s2_ph = tf.placeholder(tf.float32, [None] + OBS_INPUT_SHAPE)/255 - 0.5 a_ph = tf.placeholder(tf.float32, [None, 4]) * [1./1023, 1./249, 1./249, 1./1023] arm_state1_ph = tf.placeholder(tf.float32, [None, 8]) / 2048 arm_state2_ph = tf.placeholder(tf.float32, [None, 8]) / 2048 encoder1 = _conv_encoder.get_weight_tied_copy(observation_input=s1_ph) encoder2 = _conv_encoder.get_weight_tied_copy(observation_input=s2_ph) state_encoder1 = _state_encoder.get_weight_tied_copy(observation_input=arm_state1_ph) state_encoder2 = _state_encoder.get_weight_tied_copy(observation_input=arm_state2_ph) feature1 = tf.concat(1, [encoder1.output, state_encoder1.output]) feature2 = tf.concat(1, [encoder2.output, state_encoder2.output]) inverse_model = _inverse_model.get_weight_tied_copy(feature_input1=feature1, feature_input2=feature2) forward_model = _forward_model.get_weight_tied_copy(feature_input=feature1, action_input=a_ph) def get_forward_loss(obs, state, next_obs, next_state, actions): forward_loss = sess.run( tf.reduce_mean(tf.square( encoder2.output - forward_model.output ), axis=1), feed_dict={ s1_ph: obs, s2_ph: next_obs, a_ph: actions, arm_state1_ph = state, arm_state2_ph = next_state, } ) return forward_loss # Call rllab rollout for parallel while True: plt.clf() plt.ion() ob = env.reset() next_ob = None x = [] y = [] for t in range(env.wrapped_env._wrapped_env.env.spec.max_episode_steps): action, _ = policy.get_action(ob) next_ob, reward, done, env_infos = env.step(action) env.render() forward_loss = get_forward_loss([ob], [next_ob], [action]) if done: ob = env.reset() else: ob = next_ob x.append(t) y.append(forward_loss) # import pdb; pdb.set_trace() flag = env_infos["contact_reward"] if flag == 1: plt.title("touching table") if flag == 0: plt.title("touching nothing") else: plt.title("touching box") plt.plot(x, y, c="blue") plt.pause(0.05) # print ("Should plot") plt.show()
32.067797
92
0.700317
552
3,784
4.556159
0.353261
0.021471
0.031014
0.040557
0.170179
0.170179
0.135984
0.135984
0.074751
0.036581
0
0.036269
0.183932
3,784
118
93
32.067797
0.778174
0.037526
0
0.040816
0
0
0.054414
0
0
0
0
0
0.030612
0
null
null
0
0.153061
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
2d493273ba4aac5351552304e302799fbea8feee
740
py
Python
Mentorama/Modulo 3 - POO/Quadrado.py
MOURAIGOR/python
b267f8ef277a385e3e315e88a22390512bf1e101
[ "MIT" ]
null
null
null
Mentorama/Modulo 3 - POO/Quadrado.py
MOURAIGOR/python
b267f8ef277a385e3e315e88a22390512bf1e101
[ "MIT" ]
null
null
null
Mentorama/Modulo 3 - POO/Quadrado.py
MOURAIGOR/python
b267f8ef277a385e3e315e88a22390512bf1e101
[ "MIT" ]
null
null
null
class Quadrado:
    def __init__(self, lado):
        self.tamanho_lado = lado

    def mudar_valor_lado(self, novo_lado):
        # (dead local assignment `lado = novo_lado` removed)
        self.tamanho_lado = novo_lado

    def retornar_valor_lado(self, retorno):
        self.tamanho_lado = retorno
        print(retorno)

    def calcular_area(self, area):
        self.tamanho_lado = area
        print(area * area)


quadrado = Quadrado(6)
print('Tamanho atual é:')
print(quadrado.tamanho_lado)
print('----------------')

quadrado.mudar_valor_lado(3)
print('Novo tamanho é:')
print(quadrado.tamanho_lado)
print('----------------')

print('Tamanho atual:')
quadrado.retornar_valor_lado(3)
print('----------------')

print('Total da area ficou em :')
quadrado.calcular_area(3)
24.666667
43
0.647297
94
740
4.851064
0.244681
0.144737
0.131579
0.083333
0.131579
0.131579
0
0
0
0
0
0.006579
0.178378
740
29
44
25.517241
0.743421
0
0
0.2
0
0
0.158108
0
0
0
0
0
0
1
0.16
false
0
0
0
0.2
0.44
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
1
2d598f2e4196c937db359c1fb681b55186a150c3
2,084
py
Python
generator/request.py
WulffHunter/log_generator
47b8ab77d003494aa21b2c2ab85d1d21f7bae8fc
[ "MIT" ]
5
2021-02-12T15:52:59.000Z
2021-05-26T13:22:12.000Z
generator/request.py
WulffHunter/log_generator
47b8ab77d003494aa21b2c2ab85d1d21f7bae8fc
[ "MIT" ]
1
2021-11-03T10:10:58.000Z
2021-11-05T11:43:16.000Z
generator/request.py
WulffHunter/log_generator
47b8ab77d003494aa21b2c2ab85d1d21f7bae8fc
[ "MIT" ]
2
2021-11-07T08:12:46.000Z
2022-02-10T01:13:47.000Z
from faker import Faker
import random

import parameters
from utils import chance_choose, chance
from uri_generator import gen_path, uri_extensions, gen_uri_useable

# TODO: Continue to expand this list with the proper formats for other application
# layer protocols (e.g. FTP, SSH, SMTP...)
protocols = ['HTTP/1.0', 'HTTP/1.1', 'HTTP/2']
common_methods = ['GET', 'PUT', 'POST', 'DELETE']


# Faker is passed in as an argument to prevent unnecessary re-declaration,
# but is not needed to make this method run.
def gen_req_method(test_mode=False, faker=None):
    if faker is None:
        faker = Faker()

    return chance_choose(
        random.choice(common_methods),
        faker.http_method(),
        parameters.frequency['common_http'] / 100)


def gen_uri_path(test_mode=False):
    # TODO: Continue extending the possible URI paths and file
    # extension types
    # TODO: Add in querystrings

    # This format allows for choice of a URI path or a document
    path_options = [
        gen_path(test_mode),
        '{}{}'.format(
            gen_path(test_mode),
            random.choice(uri_extensions)
        )
    ]

    return random.choice(path_options)


def gen_querystring(test_mode=False):
    # There's an 80% chance that a querystring will be non-existent
    if chance(parameters.frequency['empty_querystring']):
        return ''

    queries = []

    for _ in range(
            random.randint(
                1,
                parameters.max_val['querystring_elements'])):
        queries.append(
            '{}={}'.format(
                gen_uri_useable(),
                gen_uri_useable()))

    querystring = '&'.join(queries)

    return '?{}'.format(querystring)


def gen_req_protocol(test_mode=False):
    return random.choice(protocols)


def gen_request(test_mode=False):
    fake = Faker()

    # 90% chance of being a common method
    # Bug fix: `fake` was passed positionally, landing in `test_mode`.
    method = gen_req_method(faker=fake)
    path = gen_uri_path(test_mode)
    querystring = gen_querystring()
    protocol = gen_req_protocol()

    return '{} {}{} {}'.format(method, path, querystring, protocol)
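The weighted method selection is the crux of gen_req_method. Below is a minimal, self-contained sketch of the same pattern; chance_choose here is a hypothetical stand-in with the contract assumed from its call site (return the first value with the given probability, otherwise the second):

import random

def chance_choose(common, rare, probability):
    # Return `common` with probability `probability` (0.0-1.0), else `rare`.
    return common if random.random() < probability else rare

common_methods = ['GET', 'PUT', 'POST', 'DELETE']
# Mirrors parameters.frequency['common_http'] / 100 with a 0.9 ratio:
# roughly 90% of draws come from the common method list.
print(chance_choose(random.choice(common_methods), 'PATCH', 0.9))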
24.232558
82
0.65499
267
2,084
4.93633
0.411985
0.048558
0.049317
0.021244
0.027314
0
0
0
0
0
0
0.008202
0.239443
2,084
85
83
24.517647
0.823344
0.236084
0
0.043478
0
0
0.068944
0
0
0
0
0.011765
0
1
0.108696
false
0
0.108696
0.021739
0.347826
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
2d5abdd3abf9c29ea242210df43416a2eeef80b9
787
py
Python
leetcode/binary_search.py
verthais/exercise-python
d989647e8fbfe8a79b9b5f2c3ab003715d238851
[ "MIT" ]
null
null
null
leetcode/binary_search.py
verthais/exercise-python
d989647e8fbfe8a79b9b5f2c3ab003715d238851
[ "MIT" ]
null
null
null
leetcode/binary_search.py
verthais/exercise-python
d989647e8fbfe8a79b9b5f2c3ab003715d238851
[ "MIT" ]
null
null
null
def binary_search(collection, lhs, rhs, value):
    if rhs > lhs:
        mid = lhs + (rhs - lhs) // 2
        if collection[mid] == value:
            return mid
        if collection[mid] > value:
            return binary_search(collection, lhs, mid - 1, value)
        return binary_search(collection, mid + 1, rhs, value)
    return -1


def eq(exp, val):
    assert exp == val, f'Expected: {exp}, got value {val}'


def main():
    tests = [
        (0, 5, [5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]),
        (8, 13, [5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]),
        (8, 9, [1, 2, 3, 4, 5, 6, 7, 8, 9]),
    ]
    for expected, value, collection in tests:
        eq(expected, binary_search(collection, 0, len(collection), value))


if __name__ == '__main__':
    main()
    print('success')
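For comparison, the same half-open [lhs, rhs) search can be written iteratively; a minimal sketch (not part of the original file) that sidesteps Python's recursion limit on very large inputs:

def binary_search_iter(collection, value):
    # Iterative variant with the same half-open [lhs, rhs) bounds.
    lhs, rhs = 0, len(collection)
    while rhs > lhs:
        mid = lhs + (rhs - lhs) // 2
        if collection[mid] == value:
            return mid
        if collection[mid] > value:
            rhs = mid
        else:
            lhs = mid + 1
    return -1

assert binary_search_iter([5, 6, 7, 8, 9], 8) == 3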
24.59375
74
0.537484
117
787
3.512821
0.350427
0.116788
0.214112
0.029197
0.360097
0.087591
0.087591
0.087591
0.087591
0.087591
0
0.099278
0.296061
787
32
75
24.59375
0.642599
0
0
0
0
0
0.059645
0
0
0
0
0
0.045455
1
0.136364
false
0
0
0
0.318182
0.045455
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
2d601a610adba2a9dbf87218ce0ac031d70916ca
9,070
py
Python
server/utils/server_db.py
DoctorChe/Python_DataBase_PyQT
6f65bad52edf9afa8cfce9689f7e88f87d420d9d
[ "MIT" ]
1
2019-08-07T20:08:32.000Z
2019-08-07T20:08:32.000Z
server/utils/server_db.py
DoctorChe/Python_DataBase_PyQT
6f65bad52edf9afa8cfce9689f7e88f87d420d9d
[ "MIT" ]
6
2019-08-08T11:53:09.000Z
2019-09-11T14:45:59.000Z
server/utils/server_db.py
DoctorChe/Python_DataBase_PyQT
6f65bad52edf9afa8cfce9689f7e88f87d420d9d
[ "MIT" ]
null
null
null
from contextlib import contextmanager

from sqlalchemy import MetaData
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

from .config_server import SERVER_DATABASE

engine = create_engine(SERVER_DATABASE)
Base = declarative_base(metadata=MetaData(bind=engine))
Session = sessionmaker(bind=engine)


@contextmanager
def session_scope():
    session = Session()
    try:
        yield session
        session.commit()
    except:
        session.rollback()
        raise
    finally:
        session.close()


# # Class - the server database:
# class ServerStorage:
#     Base = declarative_base()
#
#     # Class - mapping of the table of all users
#     # An instance of this class = a row in the AllUsers table
#     class AllUsers(Base):
#         __tablename__ = "users"
#         id = Column(Integer, primary_key=True)
#         name = Column(String, unique=True)
#         last_login = Column(String)
#         contacts = relationship("Contact", back_populates="user")
#
#         def __init__(self, username):
#             self.id = None
#             self.name = username
#             self.last_login = datetime.datetime.now()
#
#     # Class - mapping of the active users table:
#     # An instance of this class = a row in the ActiveUsers table
#     class ActiveUsers(Base):
#         __tablename__ = "active_users"
#         id = Column(Integer, primary_key=True)
#         user = Column(ForeignKey("users.id"), unique=True)
#         ip_address = Column(String)
#         port = Column(Integer)
#         login_time = Column(DateTime)
#
#         def __init__(self, user_id, ip_address, port, login_time):
#             self.id = None
#             self.user = user_id
#             self.ip_address = ip_address
#             self.port = port
#             self.login_time = login_time
#
#     # Class - mapping of the login history table
#     # An instance of this class = a row in the LoginHistory table
#     class LoginHistory(Base):
#         __tablename__ = "login_history"
#         id = Column(Integer, primary_key=True)
#         name = Column(ForeignKey("users.id"))
#         date_time = Column(DateTime)
#         ip = Column(String)
#         port = Column(Integer)
#
#         def __init__(self, name, date, ip, port):
#             self.id = None
#             self.name = name
#             self.date_time = date
#             self.ip = ip
#             self.port = port
#
#     class Contact(Base):
#         __tablename__ = "contact"
#         id = Column(Integer, primary_key=True, autoincrement=True)
#         name = Column(String)
#         information = Column(String)
#         user_id = Column(Integer, ForeignKey("users.id"))
#         user = relationship("AllUsers", back_populates="contacts")
#
#         def __init__(self, contact_name, user_id, information):
#             self.id = None
#             self.name = contact_name
#             self.information = information
#             self.user_id = user_id
#
#     def __init__(self):
#         # Create the database engine
#         # SERVER_DATABASE - sqlite:///server/db/server_db.sqlite3
#         # echo=False - disable logging (output of SQL queries)
#         # pool_recycle - by default the DB connection is dropped after 8 idle hours.
#         #   To avoid that, add the pool_recycle=7200 option (re-establish the connection every 2 hours)
#         self.database_engine = create_engine(SERVER_DATABASE, echo=False, pool_recycle=7200)
#
#         # Metadata is available through the Base class
#         self.metadata = self.Base.metadata
#
#         # Create the tables
#         self.metadata.create_all(self.database_engine)
#
#         # Create a session
#         Session = sessionmaker(bind=self.database_engine)
#         self.session = Session()
#
#         # If the active users table has any rows, they must be removed:
#         # when a connection is established, clear the active users table
#         self.session.query(self.ActiveUsers).delete()
#         self.session.commit()
#
#     # Function run when a user logs in; records the login in the database
#     def user_login(self, username, ip_address, port):
#         # Query the users table for a user with this name
#         result = self.session.query(self.AllUsers).filter_by(name=username)
#
#         # If the username is already in the table, update its last login time
#         if result.count():
#             user = result.first()
#             user.last_login = datetime.datetime.now()
#         # If not, create a new user
#         else:
#             # Create an instance of self.AllUsers, through which the data goes into the table
#             user = self.AllUsers(username)
#             self.session.add(user)
#             # A commit is needed here so that an ID gets assigned
#             self.session.commit()
#
#         # Now a record of the login can be added to the active users table.
#         # Create an instance of self.ActiveUsers, through which the data goes into the table
#         new_active_user = self.ActiveUsers(user.id, ip_address, port, datetime.datetime.now())
#         self.session.add(new_active_user)
#
#         # ... and save it to the login history.
#         # Create an instance of self.LoginHistory, through which the data goes into the table
#         history = self.LoginHistory(user.id, datetime.datetime.now(), ip_address, port)
#         self.session.add(history)
#
#         # Save the changes
#         self.session.commit()
#
#     # Function recording a user's disconnect
#     def user_logout(self, username):
#         # Look up the user who is leaving us:
#         # fetch the row from the AllUsers table
#         user = self.session.query(self.AllUsers).filter_by(name=username).first()
#
#         # Remove them from the active users table:
#         # delete the row from the ActiveUsers table
#         self.session.query(self.ActiveUsers).filter_by(user=user.id).delete()
#
#         # Apply the changes
#         self.session.commit()
#
#     # Returns the list of known users with their last login time.
#     def users_list(self):
#         query = self.session.query(
#             self.AllUsers.name,
#             self.AllUsers.last_login,
#         )
#         # Return a list of tuples
#         return query.all()
#
#     # Returns the list of active users
#     def active_users_list(self):
#         # Join the tables and collect tuples of name, address, port, time.
#         query = self.session.query(
#             self.AllUsers.name,
#             self.ActiveUsers.ip_address,
#             self.ActiveUsers.port,
#             self.ActiveUsers.login_time
#         ).join(self.AllUsers)
#         # Return a list of tuples
#         return query.all()
#
#     # Returns the login history for one user, or for all users
#     def login_history(self, username=None):
#         # Query the login history
#         query = self.session.query(self.AllUsers.name,
#                                    self.LoginHistory.date_time,
#                                    self.LoginHistory.ip,
#                                    self.LoginHistory.port
#                                    ).join(self.AllUsers)
#         # If a username was given, filter on it
#         if username:
#             query = query.filter(self.AllUsers.name == username)
#         return query.all()
#
#
# # Debugging
# if __name__ == "__main__":
#     SERVER_DATABASE = "sqlite:///server_db.sqlite3"
#     test_db = ServerStorage()
#     # perform a user 'connection'
#     test_db.user_login("client_1", "192.168.1.4", 8888)
#     test_db.user_login("client_2", "192.168.1.5", 7777)
#     # print the list of tuples - active users
#     print(test_db.active_users_list())
#     # perform a user 'disconnect'
#     test_db.user_logout("client_1")
#     # print the list of active users
#     print(test_db.active_users_list())
#     # query the login history for a user
#     test_db.login_history("client_1")
#     # print the list of known users
#     print(test_db.users_list())
#     test_db.add_contact("client_1", "client_2")
#     test_db.user_login("client_3", "192.168.1.5", 9999)
#     test_db.add_contact("client_1", "client_3")
#     test_db.add_contact("client_1", "client_3")
#     print(test_db.get_contacts("client_1"))
#     test_db.remove_contact("client_1", "client_2")
#     print(test_db.get_contacts("client_1"))
#     print(f'{test_db.get_contact("client_1", "client_3").name} '
#           f'- info: {test_db.get_contact("client_1", "client_3").information}')
#     test_db.update_contact("client_1", "client_3", "New information")
#     print(f'{test_db.get_contact("client_1", "client_3").name} '
#           f'- info: {test_db.get_contact("client_1", "client_3").information}')
40.672646
115
0.634179
1,002
9,070
5.55988
0.249501
0.02154
0.022617
0.03231
0.300485
0.211452
0.167474
0.111829
0.048106
0.030874
0
0.010889
0.26086
9,070
222
116
40.855856
0.820107
0.885998
0
0
0
0
0
0
0
0
0
0
0
1
0.05
false
0
0.3
0
0.35
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
2d63bc571680f79f12326140ffc844c72f08a20c
418
py
Python
DataWorkflow/file_deletion/migrations/0005_maxlength_filename.py
Swiss-Polar-Institute/data-workflow
4c7fee1d78d67512ae6710449e625fd945468dd9
[ "MIT" ]
null
null
null
DataWorkflow/file_deletion/migrations/0005_maxlength_filename.py
Swiss-Polar-Institute/data-workflow
4c7fee1d78d67512ae6710449e625fd945468dd9
[ "MIT" ]
12
2019-10-25T15:01:06.000Z
2021-09-22T18:02:03.000Z
DataWorkflow/file_deletion/migrations/0005_maxlength_filename.py
Swiss-Polar-Institute/data-workflow
4c7fee1d78d67512ae6710449e625fd945468dd9
[ "MIT" ]
null
null
null
# Generated by Django 2.2.6 on 2019-10-25 19:50

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('file_deletion', '0004_maxlength_etag'),
    ]

    operations = [
        migrations.AlterField(
            model_name='deletedfile',
            name='filename',
            field=models.CharField(help_text='Filename', max_length=256),
        ),
    ]
22
73
0.617225
45
418
5.6
0.822222
0
0
0
0
0
0
0
0
0
0
0.071895
0.267943
418
18
74
23.222222
0.751634
0.107656
0
0
1
0
0.15903
0
0
0
0
0
0
1
0
false
0
0.083333
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
2d6468482d56c0366eac034ecc843d9749364fb7
1,459
py
Python
src/python/procyon/types.py
orbea/procyon
469d94427d3b6e7cc2ab93606bdf968717a49150
[ "Apache-2.0" ]
null
null
null
src/python/procyon/types.py
orbea/procyon
469d94427d3b6e7cc2ab93606bdf968717a49150
[ "Apache-2.0" ]
null
null
null
src/python/procyon/types.py
orbea/procyon
469d94427d3b6e7cc2ab93606bdf968717a49150
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2017 The Procyon Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import, division, print_function, unicode_literals

import enum

from . import py3


class Type(enum.Enum):
    NULL = 0
    BOOL = 1
    INT = 2
    FLOAT = 3
    DATA = 4
    STRING = 5
    ARRAY = 6
    MAP = 7


def typeof(x):
    if x is None:
        return Type.NULL
    elif (x is True) or (x is False):
        return Type.BOOL
    elif isinstance(x, (int, py3.long)):
        return Type.INT
    elif isinstance(x, float):
        return Type.FLOAT
    elif isinstance(x, (bytes, bytearray, memoryview)):
        return Type.DATA
    elif isinstance(x, py3.unicode):
        return Type.STRING
    elif isinstance(x, (tuple, list)):
        return Type.ARRAY
    elif isinstance(x, dict):
        return Type.MAP
    else:
        raise TypeError("%r is not Procyon-serializable" % x)
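A quick usage sketch of the dispatch above, assuming the procyon package is importable (one sample value per Procyon type):

from procyon.types import Type, typeof

assert typeof(None) is Type.NULL
assert typeof(True) is Type.BOOL
assert typeof(42) is Type.INT
assert typeof(3.5) is Type.FLOAT
assert typeof(b"\x00") is Type.DATA
assert typeof(u"text") is Type.STRING
assert typeof((1, 2)) is Type.ARRAY
assert typeof({}) is Type.MAP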
27.018519
82
0.669637
211
1,459
4.597156
0.563981
0.082474
0.092784
0.03299
0
0
0
0
0
0
0
0.018987
0.241947
1,459
53
83
27.528302
0.858047
0.411241
0
0
0
0
0.035587
0
0
0
0
0
0
1
0.032258
false
0
0.096774
0
0.677419
0.032258
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
2d6c392d23be86039709559e2b39f5d2016733c8
1,538
py
Python
voicebox_project.py
raccoonML/audiotools
9b378a7e4b136cdb5e1b7a048f8d35794880a4b0
[ "MIT" ]
null
null
null
voicebox_project.py
raccoonML/audiotools
9b378a7e4b136cdb5e1b7a048f8d35794880a4b0
[ "MIT" ]
null
null
null
voicebox_project.py
raccoonML/audiotools
9b378a7e4b136cdb5e1b7a048f8d35794880a4b0
[ "MIT" ]
null
null
null
import librosa
import numpy as np

import audio
from hparams import hparams

"""
This helps implement a user interface for a vocoder.
Currently this is Griffin-Lim but can be extended to different vocoders.

Required elements for the vocoder UI are:
    self.sample_rate
    self.source_action
    self.vocode_action
"""


class Voicebox_Project:
    def __init__(self):
        # Property needed for voicebox
        self.sample_rate = hparams.sample_rate

        # Initialization for project
        self.source_spec = None

    """
    The following action methods are called by Voicebox on button press
      Source: [Load]   --> source_action
      Vocode: [Vocode] --> vocode_action
    """

    def source_action(self, wav):
        # The vocoder toolbox also vocodes the spectrogram with Griffin-Lim for comparison.
        # Inputs: wav (from voicebox)
        # Outputs: spec, wav_GL, spec_GL (to voicebox)
        self.source_spec = audio.melspectrogram(wav, hparams)
        wav_GL = audio.inv_mel_spectrogram(self.source_spec, hparams)
        spec_GL = audio.melspectrogram(wav_GL, hparams)
        return self.source_spec.T, wav_GL, spec_GL.T

    def vocode_action(self):
        # For this sample vocoder project, we will use Griffin-Lim as the vocoder.
        # Other projects will substitute an actual neural vocoder.
        # Inputs: None
        # Outputs: wav, spec (to voicebox)
        wav = audio.inv_mel_spectrogram(self.source_spec, hparams)
        spec = audio.melspectrogram(wav, hparams)
        return wav, spec.T
32.041667
91
0.692458
204
1,538
5.073529
0.392157
0.057971
0.067633
0.021256
0.150725
0.090821
0.090821
0.090821
0.090821
0
0
0
0.236021
1,538
47
92
32.723404
0.880851
0.250975
0
0
0
0
0
0
0
0
0
0
0
1
0.176471
false
0
0.235294
0
0.588235
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
2d7be3678b02b1a94e7d1293d4c7eaa5d0726a38
1,602
py
Python
Numbers/alarm.py
arindampradhan/Projects
5677e05ea56ffea2334d65b275e8920b14980ac2
[ "MIT" ]
10
2016-07-11T22:09:46.000Z
2021-12-22T18:59:07.000Z
Numbers/alarm.py
arindampradhan/Projects
5677e05ea56ffea2334d65b275e8920b14980ac2
[ "MIT" ]
1
2015-02-26T17:00:13.000Z
2015-02-26T17:00:13.000Z
Numbers/alarm.py
arindampradhan/Projects
5677e05ea56ffea2334d65b275e8920b14980ac2
[ "MIT" ]
41
2015-03-04T00:14:53.000Z
2022-01-19T14:16:24.000Z
""" Alarm Clock - A simple clock where it plays a sound after X number of minutes/seconds or at a particular time. Dependencies: pyglet pip install pyglet """ import time import winsound import pyglet def play(hh, mm): not_alarmed = 1 while(not_alarmed): cur_time = list(time.localtime()) # get the time right now hour = cur_time[3] # find the hour minute = cur_time[4] # and the minute if hour == hh and minute == mm: song = pyglet.media.load('bin/sound.wav') song.play() # play the sound pyglet.app.run() not_alarmed = 0 # stop the loop if __name__ == '__main__': print """ 1. Play sound after X minutes 2. Play sound at an exact time """ choice = input('What do you want to do? ') if choice == 1: mins = input('How many minutes from now? ') hh_from_now = mins / 60 # if minutes > 60, this will adjust the hours mm_from_now = mins % 60 # and then the minutes cur_time = list(time.localtime()) # get the time right now hour = cur_time[3] # find the current hour minute = cur_time[4] # and the current minute hh = (hour + hh_from_now+(minute+mm_from_now)/60) % 24 # cycle through the clock if hh > 24 mm = (minute + mm_from_now) % 60 # cycle through the clock if mm > 60 play(hh, mm) elif choice == 2: hh = input('What hour do you want to wake up (0-23)? ') mm = input('What minute do you want to wake up (0-59)? ') play(hh, mm)
32.04
100
0.581773
240
1,602
3.770833
0.366667
0.046409
0.026519
0.036464
0.313812
0.227624
0.227624
0.134807
0.134807
0.134807
0
0.029466
0.322097
1,602
49
101
32.693878
0.803867
0.176654
0
0.235294
0
0
0.21043
0
0
0
0
0
0
0
null
null
0
0.088235
null
null
0.029412
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
2d7e4675f7508f5659dc4db59ff7f4d23a5f9db7
642
py
Python
reframe/utility/json.py
stevenvdb/reframe
be9de13dc16d2c9fb4b760e07ac986b04f8ed880
[ "BSD-3-Clause" ]
null
null
null
reframe/utility/json.py
stevenvdb/reframe
be9de13dc16d2c9fb4b760e07ac986b04f8ed880
[ "BSD-3-Clause" ]
null
null
null
reframe/utility/json.py
stevenvdb/reframe
be9de13dc16d2c9fb4b760e07ac986b04f8ed880
[ "BSD-3-Clause" ]
null
null
null
# Copyright 2016-2020 Swiss National Supercomputing Centre (CSCS/ETH Zurich)
# ReFrame Project Developers. See the top-level LICENSE file for details.
#
# SPDX-License-Identifier: BSD-3-Clause

import json


class _ReframeJsonEncoder(json.JSONEncoder):
    def default(self, obj):
        if hasattr(obj, '__rfm_json_encode__'):
            return obj.__rfm_json_encode__()

        return json.JSONEncoder.default(self, obj)


def dump(obj, fp, **kwargs):
    kwargs['cls'] = _ReframeJsonEncoder
    return json.dump(obj, fp, **kwargs)


def dumps(obj, **kwargs):
    kwargs['cls'] = _ReframeJsonEncoder
    return json.dumps(obj, **kwargs)
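A class opts into the encoder by defining __rfm_json_encode__; a self-contained sketch of the same hook, with a hypothetical Point class for illustration:

import json

class _Encoder(json.JSONEncoder):
    # Same hook as _ReframeJsonEncoder above.
    def default(self, obj):
        if hasattr(obj, '__rfm_json_encode__'):
            return obj.__rfm_json_encode__()
        return json.JSONEncoder.default(self, obj)

class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y

    def __rfm_json_encode__(self):
        return {'x': self.x, 'y': self.y}

print(json.dumps(Point(1, 2), cls=_Encoder))  # -> {"x": 1, "y": 2}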
25.68
76
0.705607
80
642
5.475
0.5625
0.068493
0.063927
0.073059
0.296804
0.196347
0
0
0
0
0
0.017078
0.179128
642
24
77
26.75
0.814042
0.286604
0
0.166667
0
0
0.055188
0
0
0
0
0
0
1
0.25
false
0
0.083333
0
0.75
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
2d805588965de2f3350c8e40dca405d56b0fa7da
1,271
py
Python
nginx-with-mtls-and-appserver/appserver/app.py
fshmcallister/examples
e2052778cbd531bf716131b8311e87c4ee005f07
[ "MIT" ]
6
2019-10-18T13:32:46.000Z
2020-06-20T17:45:43.000Z
nginx-with-mtls-and-appserver/appserver/app.py
fshmcallister/examples
e2052778cbd531bf716131b8311e87c4ee005f07
[ "MIT" ]
4
2019-07-18T16:05:57.000Z
2021-09-23T23:27:06.000Z
nginx-with-mtls-and-appserver/appserver/app.py
fshmcallister/examples
e2052778cbd531bf716131b8311e87c4ee005f07
[ "MIT" ]
3
2019-07-15T13:16:49.000Z
2020-01-09T09:39:33.000Z
import re

from flask import Flask, request

app = Flask(__name__)


def generate_whitelist():
    whitelist = []
    with open('/whitelist.txt', 'r') as f:
        for line in f.readlines():
            if line.strip().endswith('d.wott.local'):
                whitelist.append(line.strip())
    return whitelist


def grant_client_access(headers):
    """
    We need to check for:
     * 'Ssl-Client-Verify' = 'SUCCESS'
     * 'Ssl-Client' = 'CN=x.d.wott.local,O=Web of Trusted Things\\, Ltd,ST=London,C=UK')
    """
    if not headers.get('Ssl-Client-Verify') == 'SUCCESS':
        return False

    whitelist = generate_whitelist()
    print('Device whitelist: {}'.format(whitelist))

    # Extract the Common Name from the certificate
    matchObj = re.match(
        r'.*CN=(.*.d.wott.local)',
        headers.get('Ssl-Client'),
        re.M | re.I
    )
    print('Got request from {}'.format(matchObj.group(1)))

    # Match the device against the whitelist
    if matchObj.group(1) in whitelist:
        print('{} found in whitelist'.format(matchObj.group(1)))
        return True
    return False


@app.route('/')
def hello_world():
    if grant_client_access(request.headers):
        return 'Access granted!\n'
    else:
        return 'Access denied!\n'
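The Common Name extraction can be exercised in isolation; a small sketch using a synthetic Ssl-Client header value matching the docstring:

import re

ssl_client = 'CN=x.d.wott.local,O=Web of Trusted Things\\, Ltd,ST=London,C=UK'
match = re.match(r'.*CN=(.*.d.wott.local)', ssl_client, re.M | re.I)
print(match.group(1))  # -> x.d.wott.local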
24.442308
88
0.610543
163
1,271
4.693252
0.478528
0.047059
0.039216
0.057516
0
0
0
0
0
0
0
0.003122
0.243902
1,271
51
89
24.921569
0.792924
0.177026
0
0.064516
1
0
0.173529
0.021569
0
0
0
0
0
1
0.096774
false
0
0.064516
0
0.354839
0.096774
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
2d8645dc45088fa874c2aeccbf853372bdcf387c
507
py
Python
notebook/03-udacityIntroductionToMachineLearning/projects/datasets_questions/utils/read_names.py
EmanuelFontelles/machineLearning
26e810c2dbc89c2076b312d02a957ab6f8dee7d8
[ "MIT" ]
2
2018-11-09T03:49:31.000Z
2019-06-28T17:24:04.000Z
notebook/03-udacityIntroductionToMachineLearning/projects/datasets_questions/utils/read_names.py
lucasvt01/Machine_Learning
6ee17061b77d23c966ad7700712938bfe00ef9c1
[ "MIT" ]
null
null
null
notebook/03-udacityIntroductionToMachineLearning/projects/datasets_questions/utils/read_names.py
lucasvt01/Machine_Learning
6ee17061b77d23c966ad7700712938bfe00ef9c1
[ "MIT" ]
2
2020-05-09T00:49:56.000Z
2021-08-28T07:24:46.000Z
import pandas as pd
import sys
from os import system

sys.path.append('../final_project/')
sys.path.append('../')


def readNames(inputFile='new_poi_names.txt'):
    '''
    A function to read names data from a file created by a data cache

    Returns:
        A data frame that contains data from 'poi_names.txt'
    '''
    # bash_command = 'bash script.sh'
    # system(bash_command)

    data = pd.read_csv(inputFile, skiprows=2, delimiter=';',
                       header=None, names=['Ans', 'Name'])
    return data
25.35
96
0.672584
74
507
4.513514
0.608108
0.041916
0.077844
0
0
0
0
0
0
0
0
0.002445
0.193294
507
20
97
25.35
0.814181
0.376726
0
0
0
0
0.15411
0
0
0
0
0
0
1
0.125
false
0
0.375
0
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
2d8781eedd833caad160a7f046c6482144d1ef2b
1,270
py
Python
launch/test/legacy/launch_counter.py
stonier/launch
e8704247708eb017c388aaf8606e9dbb6971239b
[ "Apache-2.0" ]
null
null
null
launch/test/legacy/launch_counter.py
stonier/launch
e8704247708eb017c388aaf8606e9dbb6971239b
[ "Apache-2.0" ]
null
null
null
launch/test/legacy/launch_counter.py
stonier/launch
e8704247708eb017c388aaf8606e9dbb6971239b
[ "Apache-2.0" ]
null
null
null
# Copyright 2015 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import sys
from tempfile import NamedTemporaryFile

from launch.legacy.exit_handler import ignore_exit_handler
from launch.legacy.output_handler import FileOutput


def launch(launch_descriptor, argv):
    counter_file = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'counter.py')

    with NamedTemporaryFile(mode='w', prefix='foo_', delete=False) as h:
        foo_filename = h.name

    ld = launch_descriptor
    ld.add_process(
        cmd=[sys.executable, '-u', counter_file, '--limit', '15', '--sleep', '0.5'],
        name='foo',
        output_handlers=[FileOutput(filename=foo_filename)],
        exit_handler=ignore_exit_handler,
    )
35.277778
89
0.734646
180
1,270
5.072222
0.6
0.065717
0.028478
0.035049
0
0
0
0
0
0
0
0.011374
0.169291
1,270
35
90
36.285714
0.854028
0.451969
0
0
0
0
0.057185
0
0
0
0
0
0
1
0.0625
false
0
0.3125
0
0.375
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
2d9380bab4266fca29ab688eb70ff5eb6fd17ab1
990
py
Python
Matrix/Leetcode 909. Snakes and Ladders.py
kaizhengny/LeetCode
67d64536ab80f4966699fe7460d165f2a98d6a82
[ "MIT" ]
31
2020-06-23T00:40:04.000Z
2022-01-08T11:06:24.000Z
Matrix/Leetcode 909. Snakes and Ladders.py
kaizhengny/LeetCode
67d64536ab80f4966699fe7460d165f2a98d6a82
[ "MIT" ]
null
null
null
Matrix/Leetcode 909. Snakes and Ladders.py
kaizhengny/LeetCode
67d64536ab80f4966699fe7460d165f2a98d6a82
[ "MIT" ]
7
2020-04-30T08:46:03.000Z
2021-08-28T16:25:54.000Z
import collections
from typing import List


class Solution:
    def snakesAndLadders(self, board: List[List[int]]) -> int:
        n = len(board)
        q = collections.deque()
        q.append(1)
        visited = set()
        visited.add(1)
        step = 0
        while q:
            size = len(q)
            for _ in range(size):
                num = q.popleft()
                if num == n * n:
                    return step
                for i in range(1, 7):
                    if num + i > n * n:
                        break
                    nxt = self.getValue(board, num + i)
                    if nxt == -1:
                        nxt = num + i
                    if nxt not in visited:
                        q.append(nxt)
                        visited.add(nxt)
            step += 1
        return -1

    def getValue(self, board, num):
        # Convert a 1-based square number to board coordinates: rows count
        # from the bottom and alternate direction (boustrophedon).
        n = len(board)
        x = (num - 1) // n
        y = (num - 1) % n
        if x % 2 == 1:
            y = n - 1 - y
        x = n - 1 - x
        return board[x][y]
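A minimal driver for the solution above (board taken from the LeetCode 909 example, where the expected answer is 4):

board = [
    [-1, -1, -1, -1, -1, -1],
    [-1, -1, -1, -1, -1, -1],
    [-1, -1, -1, -1, -1, -1],
    [-1, 35, -1, -1, 13, -1],
    [-1, -1, -1, -1, -1, -1],
    [-1, 15, -1, -1, -1, -1],
]
print(Solution().snakesAndLadders(board))  # expected: 4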
28.285714
62
0.360606
115
990
3.095652
0.330435
0.033708
0.050562
0.050562
0
0
0
0
0
0
0
0.030172
0.531313
990
35
63
28.285714
0.737069
0
0
0.060606
0
0
0
0
0
0
0
0
0
1
0.060606
false
0
0
0
0.181818
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
2d97f03499e19a5c1f5c27d7a74182ab0d44f492
4,049
py
Python
src/controllerarena/controllers/refVec.py
VerifiableRobotics/controller-arena
4506ef47404de85ec0511594740e53c27a21ef88
[ "BSD-3-Clause" ]
null
null
null
src/controllerarena/controllers/refVec.py
VerifiableRobotics/controller-arena
4506ef47404de85ec0511594740e53c27a21ef88
[ "BSD-3-Clause" ]
null
null
null
src/controllerarena/controllers/refVec.py
VerifiableRobotics/controller-arena
4506ef47404de85ec0511594740e53c27a21ef88
[ "BSD-3-Clause" ]
null
null
null
# code for python reference dipole vector field controller
# these functions require stuff
# from mathFuns import *
from numpy import *
from math import *


class refVec:
    # define the constructor
    def __init__(self, q_0, controller_flag):
        # Initialize controller state
        self.phi_prev = None
        self.q_prev = q_0
        self.e_int_w = 0
        self.e_int_u = 0
        # set gains
        self.k_p_u = 1  # u indicates it is a position gain. p indicates it is a proportional gain.
        self.k_p_w = 3  # w indicates it is an angular gain. p indicates it is a proportional gain.
        if controller_flag == 1:  # PID
            self.k_i_w = 1
            self.k_i_u = 1
            self.k_d = -1  # the derivative gain is only on the angle
        elif controller_flag == 2:  # PI
            self.k_i_w = 1
            self.k_i_u = 1
            self.k_d = 0
        elif controller_flag == 3:  # PD
            self.k_i_w = 0
            self.k_i_u = 0
            self.k_d = -1
        else:  # P
            self.k_i_w = 0
            self.k_i_u = 0
            self.k_d = 0

    def get_output(self, q_d, q, dt):
        # obtain reference vector field value
        F = self.get_vector_field(q, q_d)  # F is a column vector
        # obtain control signal as a fcn of reference vector field value
        u = self.get_control(q, q_d, F, dt)
        return u

    def get_vector_field(self, q, q_d):
        # return type: numpy array
        # note: unsure if this vector field was just an example from the paper!!
        # compute vector field F
        # unpack
        # x = q[0][0]
        # y = q[1][0]
        # x_d = q_d[0][0]
        # y_d = q_d[1][0]
        #
        # compute [taken from paper draft], where r = [1;0] and lambda = 3
        # Fx = 2*(x - x_d)**2 - (y - y_d)**2
        # Fy = 3*(x - x_d)*(y - y_d)
        # F = array([[Fx],[Fy]])
        lamb = 3
        theta_d = q_d[2][0]
        delta_p = q[0:2] - q_d[0:2]  # location - location_desired
        r = array([[cos(theta_d)], [sin(theta_d)]])
        # should be a column vector
        F = lamb * (dot(transpose(r), delta_p)[0][0]) * delta_p - r * (dot(transpose(delta_p), delta_p)[0][0])
        print(F)
        return F  # col vector

    def get_control(self, q, q_d, F, dt):
        # I think that this control law is not a function of the vector field, and that it should
        # work if F(q) changes
        #
        # compute control signal u
        delta_p = q[0:2] - q_d[0:2]  # location - location_desired
        self.e_int_w += self.sub_angles(q[2][0], q_d[2][0]) * dt  # accumulate angular error
        self.e_int_u += linalg.norm(delta_p) * dt  # accumulate position error
        theta = q[2][0]
        # unpack gains
        k_p_u = self.k_p_u
        k_p_w = self.k_p_w
        k_i_w = self.k_i_w
        k_i_u = self.k_i_u
        k_d = self.k_d
        Fx = F[0][0]
        Fy = F[1][0]
        phi = atan2(Fy, Fx)
        # backward finite difference for phidot
        if self.phi_prev is None:  # if this is the first pass through the controller, phi_dot = 0
            self.phi_prev = phi
        # end if
        phi_dot = (phi - self.phi_prev) / dt
        self.phi_prev = phi
        q_dot = (q - self.q_prev) / dt
        self.q_prev = q
        # controller
        v = -k_p_u * sign(dot(transpose(delta_p), array([[cos(theta)], [sin(theta)]]))[0][0]) * tanh(linalg.norm(delta_p) ** 2) - k_i_u * self.e_int_u
        # k_d determines whether the derivative term is used, k_i for the i term
        w = -k_p_w * self.sub_angles(theta, phi) - k_i_w * self.e_int_w - k_d * phi_dot
        u = array([[v], [w]])
        print(u)
        return u

    def update_state(self, q_d, q, dt):
        # x_k+1 = 0
        pass

    def sub_angles(self, ang1, ang2):
        return (ang1 - ang2 + pi) % (2 * pi) - pi

# For future:
# pass r vector as parameter
# low pass filtering for derivatives (PD control?) [phidot]
# visual stuff

# global feedback plan is the ref vecf field
# controller is a function of vector field, but you can use a better controller to get better performance
34.313559
147
0.568288
685
4,049
3.167883
0.236496
0.043779
0.02765
0.016129
0.119816
0.105991
0.105991
0.105991
0.073733
0.073733
0
0.026594
0.322055
4,049
117
148
34.606838
0.763934
0.388491
0
0.272727
0
0
0
0
0
0
0
0
0
0
null
null
0.015152
0.030303
null
null
0.030303
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
2da60021013430b40d9e59ed61bc0bac8ce56e08
416
py
Python
spotdl/get-file-name.py
Shaxadhere/spotdl
fc7d587a86b886fa4e020ac825d1748a7776de32
[ "MIT" ]
25
2019-02-21T09:31:56.000Z
2022-03-13T15:36:24.000Z
spotdl/get-file-name.py
Shaxadhere/spotdl
fc7d587a86b886fa4e020ac825d1748a7776de32
[ "MIT" ]
2
2019-09-02T20:04:44.000Z
2019-12-27T22:13:13.000Z
spotdl/get-file-name.py
Shaxadhere/spotdl
fc7d587a86b886fa4e020ac825d1748a7776de32
[ "MIT" ]
9
2019-10-01T12:44:29.000Z
2021-03-24T10:09:03.000Z
from spotdl import handle
from spotdl import const
from spotdl import downloader

import os
import sys

const.args = handle.get_arguments(to_group=True)

track = downloader.Downloader(raw_song=const.args.song[0])
track_title = track.refine_songname(track.content.title)
track_filename = track_title + const.args.output_ext
track_download_path = os.path.join(const.args.folder, track_filename)

print(track_filename)
26
69
0.822115
63
416
5.238095
0.47619
0.109091
0.145455
0
0
0
0
0
0
0
0
0.002653
0.09375
416
16
70
26
0.872679
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.454545
0
0.454545
0.090909
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
2da911fdfe3d966fb015f3569a027ce8ff19ecfb
976
py
Python
Prototype Pygame/arrays.py
KValexander/own_rts
6bfeadb5c5d29461471c84d883b616117cea79f5
[ "MIT" ]
null
null
null
Prototype Pygame/arrays.py
KValexander/own_rts
6bfeadb5c5d29461471c84d883b616117cea79f5
[ "MIT" ]
null
null
null
Prototype Pygame/arrays.py
KValexander/own_rts
6bfeadb5c5d29461471c84d883b616117cea79f5
[ "MIT" ]
null
null
null
# Connect files
from configs import *

# Arrays
items = []
selectedItems = []

# Interface arrays
buttons = []
surfaces = []


# Getting item
def getItemById(ident):
    for item in items:
        if item.id == ident:
            return item


# Removing item
def removeItem(item):
    items.remove(item)


# Removing items
def removeItems():
    for item in selectedItems:
        items.remove(item)
    clearSelection()


# Adding items in selection items
def addSelection(item):
    item.selected = True
    selectedItems.append(item)


# Clear selected items
def clearSelection():
    for item in items:
        item.selected = False
    selectedItems.clear()


# Clear buttons
def clearButtons():
    buttons.clear()


# Clear surfaces
def clearSurfaces():
    surfaces.clear()


# Import templates
# Soldier added to the import: it is referenced in addItem below and is
# assumed to live in the same templates module as Worker.
from templates import Worker, Soldier


# Adding item
def addItem(case, counter, x, y, faction):
    if case == "worker":
        item = Worker(counter, x, y, faction)
    if case == "soldier":
        item = Soldier(counter, x, y, faction)
    items.append(item)
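The selection bookkeeping can be checked without the game running; a self-contained sketch where FakeItem is a hypothetical stand-in for the template classes:

class FakeItem:
    def __init__(self, ident):
        self.id = ident
        self.selected = False

items = [FakeItem(i) for i in range(3)]
selectedItems = []

# Mirrors getItemById followed by addSelection.
target = next(item for item in items if item.id == 1)
target.selected = True
selectedItems.append(target)
print(len(selectedItems), target.selected)  # -> 1 True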
16.542373
42
0.71209
123
976
5.650407
0.365854
0.030216
0.038849
0.069065
0.063309
0.063309
0
0
0
0
0
0
0.172131
976
58
43
16.827586
0.860149
0.194672
0
0.121212
0
0
0.016818
0
0
0
0
0
0
1
0.242424
false
0
0.060606
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
2dafc2ffcc26838e9b8053b355658613f3b90115
1,027
py
Python
3rdparty/wsgi_intercept/test/test_mechanoid.py
arda2525/fixofx
1792d94697af682ca1d4a75cfefe98465d95a288
[ "Apache-2.0" ]
50
2015-01-01T00:14:04.000Z
2020-11-26T04:44:30.000Z
3rdparty/wsgi_intercept/test/test_mechanoid.py
arda2525/fixofx
1792d94697af682ca1d4a75cfefe98465d95a288
[ "Apache-2.0" ]
3
2016-01-31T17:14:41.000Z
2017-03-01T13:36:17.000Z
3rdparty/wsgi_intercept/test/test_mechanoid.py
arda2525/fixofx
1792d94697af682ca1d4a75cfefe98465d95a288
[ "Apache-2.0" ]
15
2015-10-29T09:04:21.000Z
2022-01-19T17:33:25.000Z
#! /usr/bin/env python2.3
from wsgi_intercept.mechanoid_intercept import Browser
from nose.tools import with_setup

import wsgi_intercept
from wsgi_intercept import test_wsgi_app

###

_saved_debuglevel = None


def install(port=80):
    _saved_debuglevel, wsgi_intercept.debuglevel = wsgi_intercept.debuglevel, 1
    wsgi_intercept.add_wsgi_intercept('some_hopefully_nonexistant_domain', port,
                                      test_wsgi_app.create_fn)


def uninstall():
    wsgi_intercept.debuglevel = _saved_debuglevel


@with_setup(install, uninstall)
def test_success():
    b = Browser()
    b.open('http://some_hopefully_nonexistant_domain:80/')
    assert test_wsgi_app.success()


@with_setup(install, uninstall)
def test_https_success():
    b = Browser()
    b.open('https://some_hopefully_nonexistant_domain/')
    assert test_wsgi_app.success()


@with_setup(lambda: install(443), uninstall)
def test_https_specific_port_success():
    b = Browser()
    b.open('https://some_hopefully_nonexistant_domain:443/')
    assert test_wsgi_app.success()
30.205882
105
0.772152
137
1,027
5.423358
0.306569
0.139973
0.074024
0.161507
0.370121
0.310902
0.236878
0.148048
0.148048
0.148048
0
0.014477
0.125609
1,027
34
106
30.205882
0.812918
0.023369
0
0.32
0
0
0.165
0.033
0
0
0
0
0.12
1
0.2
false
0
0.16
0
0.36
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
2db8d601711034292e85c17511fbcd5cd25b13fb
6,126
py
Python
locust/test/test_env.py
radhakrishnaakamat/locust
51b1d5038a2be6e2823b2576c4436f2ff9f7c7c2
[ "MIT" ]
1
2022-02-26T00:17:46.000Z
2022-02-26T00:17:46.000Z
locust/test/test_env.py
radhakrishnaakamat/locust
51b1d5038a2be6e2823b2576c4436f2ff9f7c7c2
[ "MIT" ]
1
2020-12-29T04:26:09.000Z
2020-12-29T04:26:09.000Z
locust/test/test_env.py
radhakrishnaakamat/locust
51b1d5038a2be6e2823b2576c4436f2ff9f7c7c2
[ "MIT" ]
1
2022-02-25T14:23:40.000Z
2022-02-25T14:23:40.000Z
from locust import (
    constant,
)
from locust.env import Environment, LoadTestShape
from locust.user import (
    User,
    task,
)
from locust.user.task import TaskSet

from .testcases import LocustTestCase
from .fake_module1_for_env_test import MyUserWithSameName as MyUserWithSameName1
from .fake_module2_for_env_test import MyUserWithSameName as MyUserWithSameName2


class TestEnvironment(LocustTestCase):
    def test_user_classes_count(self):
        class MyUser1(User):
            wait_time = constant(0)

            @task
            def my_task(self):
                pass

        class MyUser2(User):
            wait_time = constant(0)

            @task
            def my_task(self):
                pass

        environment = Environment(user_classes=[MyUser1, MyUser2])
        self.assertDictEqual({"MyUser1": MyUser1, "MyUser2": MyUser2}, environment.user_classes_by_name)

    def test_user_classes_with_same_name_is_error(self):
        with self.assertRaises(ValueError) as e:
            environment = Environment(user_classes=[MyUserWithSameName1, MyUserWithSameName2])

        self.assertEqual(
            e.exception.args[0],
            "The following user classes have the same class name: locust.test.fake_module1_for_env_test.MyUserWithSameName, locust.test.fake_module2_for_env_test.MyUserWithSameName",
        )

    def test_assign_equal_weights(self):
        def verify_tasks(u, target_tasks):
            self.assertEqual(len(u.tasks), len(target_tasks))
            tasks = [t.__name__ for t in u.tasks]
            self.assertEqual(len(tasks), len(set(tasks)))
            self.assertEqual(set(tasks), set(target_tasks))

        # Base case
        class MyUser1(User):
            wait_time = constant(0)

            @task(4)
            def my_task(self):
                pass

            @task(1)
            def my_task_2(self):
                pass

        environment = Environment(user_classes=[MyUser1])
        environment.assign_equal_weights()
        u = environment.user_classes[0]
        verify_tasks(u, ["my_task", "my_task_2"])

        # Testing nested task sets
        class MyUser2(User):
            @task
            class TopLevelTaskSet(TaskSet):
                @task
                class IndexTaskSet(TaskSet):
                    @task(10)
                    def index(self):
                        self.client.get("/")

                    @task
                    def stop(self):
                        self.client.get("/hi")

                @task(2)
                def stats(self):
                    self.client.get("/stats/requests")

        environment = Environment(user_classes=[MyUser2])
        environment.assign_equal_weights()
        u = environment.user_classes[0]
        verify_tasks(u, ["index", "stop", "stats"])

        # Testing task assignment via instance variable
        def outside_task():
            pass

        def outside_task_2():
            pass

        class SingleTaskSet(TaskSet):
            tasks = [outside_task, outside_task, outside_task_2]

        class MyUser3(User):
            tasks = [SingleTaskSet, outside_task]

        environment = Environment(user_classes=[MyUser3])
        environment.assign_equal_weights()
        u = environment.user_classes[0]
        verify_tasks(u, ["outside_task", "outside_task_2"])

        # Testing task assignment via dict
        class DictTaskSet(TaskSet):
            def dict_task_1():
                pass

            def dict_task_2():
                pass

            def dict_task_3():
                pass

            tasks = {
                dict_task_1: 5,
                dict_task_2: 3,
                dict_task_3: 1,
            }

        class MyUser4(User):
            tasks = [DictTaskSet, SingleTaskSet, SingleTaskSet]

        # Assign user tasks in dict
        environment = Environment(user_classes=[MyUser4])
        environment.assign_equal_weights()
        u = environment.user_classes[0]
        verify_tasks(u, ["outside_task", "outside_task_2", "dict_task_1", "dict_task_2", "dict_task_3"])

        class MyUser5(User):
            tasks = {
                DictTaskSet: 5,
                SingleTaskSet: 3,
                outside_task: 6,
            }

        environment = Environment(user_classes=[MyUser5])
        environment.assign_equal_weights()
        u = environment.user_classes[0]
        verify_tasks(u, ["outside_task", "outside_task_2", "dict_task_1", "dict_task_2", "dict_task_3"])

    def test_user_classes_with_zero_weight_are_removed(self):
        class MyUser1(User):
            wait_time = constant(0)
            weight = 0

            @task
            def my_task(self):
                pass

        class MyUser2(User):
            wait_time = constant(0)
            weight = 1

            @task
            def my_task(self):
                pass

        environment = Environment(user_classes=[MyUser1, MyUser2])
        self.assertEqual(len(environment.user_classes), 1)
        self.assertIs(environment.user_classes[0], MyUser2)

    def test_all_user_classes_with_zero_weight_raises_exception(self):
        class MyUser1(User):
            wait_time = constant(0)
            weight = 0

            @task
            def my_task(self):
                pass

        class MyUser2(User):
            wait_time = constant(0)
            weight = 0

            @task
            def my_task(self):
                pass

        with self.assertRaises(ValueError) as e:
            environment = Environment(user_classes=[MyUser1, MyUser2])

        self.assertEqual(
            e.exception.args[0],
            "There are no users with weight > 0.",
        )

    def test_shape_class_attribute(self):
        class SubLoadTestShape(LoadTestShape):
            """Inherited from locust.env.LoadTestShape"""

        with self.assertRaisesRegex(
            ValueError, r"instance of LoadTestShape or subclass LoadTestShape", msg="exception message is mismatching"
        ):
            Environment(user_classes=[MyUserWithSameName1], shape_class=SubLoadTestShape)
30.326733
182
0.579987
639
6,126
5.322379
0.179969
0.07439
0.116436
0.087327
0.432226
0.378418
0.344016
0.329903
0.302558
0.302558
0
0.021303
0.333333
6,126
201
183
30.477612
0.811459
0.029383
0
0.466667
0
0.006667
0.082884
0.019036
0
0
0
0
0.073333
1
0.153333
false
0.086667
0.046667
0
0.313333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
2dd0dbf3c9583810719a7c459b8980e6849b03e3
696
py
Python
buildings/gui/menu_frame.py
strk/nz-buildings
8dc8ee19d322837380bb4f016b01eccee2c1bd0a
[ "PostgreSQL", "CC-BY-4.0" ]
2
2020-02-21T00:46:31.000Z
2020-08-17T14:22:19.000Z
buildings/gui/menu_frame.py
strk/nz-buildings
8dc8ee19d322837380bb4f016b01eccee2c1bd0a
[ "PostgreSQL", "CC-BY-4.0" ]
243
2018-12-16T22:01:54.000Z
2022-01-10T20:09:24.000Z
buildings/gui/menu_frame.py
strk/nz-buildings
8dc8ee19d322837380bb4f016b01eccee2c1bd0a
[ "PostgreSQL", "CC-BY-4.0" ]
1
2020-03-24T10:35:43.000Z
2020-03-24T10:35:43.000Z
# -*- coding: utf-8 -*-

import os.path

from qgis.PyQt import uic
from qgis.PyQt.QtWidgets import QFrame

from buildings.utilities.layers import LayerRegistry

# Get the path for the parent directory of this file.
__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))

FORM_CLASS, _ = uic.loadUiType(os.path.join(os.path.dirname(__file__), "menu_frame.ui"))


class MenuFrame(QFrame, FORM_CLASS):
    def __init__(self, dockwidget, parent=None):
        """Constructor."""
        super(MenuFrame, self).__init__(parent)
        self.setupUi(self)
        self.txt_dashboard.viewport().setAutoFillBackground(False)
        self.layer_registry = LayerRegistry()
27.84
88
0.719828
90
696
5.277778
0.566667
0.075789
0.050526
0.050526
0
0
0
0
0
0
0
0.001698
0.153736
696
24
89
29
0.804754
0.125
0
0
0
0
0.021595
0
0
0
0
0
0
1
0.083333
false
0
0.333333
0
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
2dd1642cbad2de85e44adff8bc936f9819e814e3
3,749
py
Python
gitver/config.py
movermeyer/gitver
77d5a4420209a4ca00349b094eeca1f13e50d8e5
[ "Apache-2.0" ]
12
2015-03-31T18:19:42.000Z
2019-08-14T10:56:00.000Z
gitver/config.py
movermeyer/gitver
77d5a4420209a4ca00349b094eeca1f13e50d8e5
[ "Apache-2.0" ]
1
2015-04-21T12:44:40.000Z
2019-07-15T07:12:15.000Z
gitver/config.py
movermeyer/gitver
77d5a4420209a4ca00349b094eeca1f13e50d8e5
[ "Apache-2.0" ]
8
2016-03-25T16:43:13.000Z
2021-11-26T10:44:57.000Z
#!/usr/bin/env python2
# coding=utf-8

"""
The default per-repository configuration
"""

import sys
import json
import string
from os.path import exists, dirname
from gitver.defines import CFGFILE
from termcolors import term, bold

default_config_text = """{
    # automatically generated configuration file
    #
    # These defaults implement Semantic Versioning as described in the latest
    # available documentation at http://semver.org/spec/v2.0.0.html

    # by default, terminal output is NOT colorized for compatibility with older
    # terminal emulators: you may enable this if you like a more modern look
    "use_terminal_colors": false,

    # prevent gitver from storing any information in its configuration directory
    # if the .gitignore file doesn't exclude it from the repository
    "safe_mode": true,

    # default pre-release metadata when commit count > 0 AND
    # no NEXT has been defined
    "default_meta_pr_in_next_no_next": "NEXT",

    # default pre-release metadata when commit count > 0
    "default_meta_pr_in_next": "SNAPSHOT",

    # default pre-release metadata prefix
    "meta_pr_prefix": "-",

    # default commit count prefix
    "commit_count_prefix": ".",

    # Python-based format string variable names are:
    #     maj, min, patch, rev, rev_prefix, meta_pr_prefix, meta_pr,
    #     commit_count_prefix, commit_count, build_id, build_id_full
    #
    # Note that prefixes will be empty strings if their valued counterpart
    # doesn't have a meaningful value (i.e., 0 for commit count, no meta
    # pre-release, ..)

    # format string used to build the current version string when the
    # commit count is 0
    "format": "%(maj)s.%(min)s.%(patch)s%(rev_prefix)s%(rev)s%(meta_pr_prefix)s%(meta_pr)s",

    # format string used to build the current version string when the
    # commit count is > 0
    "format_next": "%(maj)s.%(min)s.%(patch)s%(rev_prefix)s%(rev)s%(meta_pr_prefix)s%(meta_pr)s%(commit_count_prefix)s%(commit_count)s+%(build_id)s"
}"""


def remove_comments(text):
    """
    Removes line comments denoted by sub-strings starting with a '#'
    character from the specified string, constructs a new text and returns it.
    """
    data = string.split(text, '\n')
    ret = ''
    for line in data:
        if not line.strip().startswith('#'):
            ret += line
    return ret


default_config = json.loads(remove_comments(default_config_text))


def create_default_configuration_file():
    """
    Creates a default configuration file from the default gitver's
    configuration text string in the predefined gitver's configuration
    directory.
    """
    if not exists(CFGFILE):
        if exists(dirname(CFGFILE)):
            with open(CFGFILE, 'w') as f:
                f.writelines(default_config_text)
                return True
    return False


def load_user_config():
    """
    Returns the gitver's configuration: tries to read the stored
    configuration file and merges it with the default one, ensuring a valid
    configuration is always returned.
    """
    try:
        with open(CFGFILE, 'r') as f:
            data = ''
            for line in f:
                l = line.strip()
                if not l.startswith('#'):
                    data += l
        user = json.loads(data)
    except IOError:
        user = dict()
    except (ValueError, KeyError) as v:
        term.err("An error occurred parsing the configuration file \"" +
                 CFGFILE + "\": " + v.message +
                 "\nPlease check its syntax or rename it and generate the "
                 "default one with the " + bold("gitver init") + " command.")
        sys.exit(1)

    # merge user with defaults
    return dict(default_config, **user)
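remove_comments drops whole lines whose first non-whitespace character is '#' and joins the remainder without newlines; an equivalent Python 3 sketch (the module itself targets python2 and uses string.split):

def remove_comments_py3(text):
    # Keep only lines that do not start with '#', concatenated directly.
    return ''.join(
        line for line in text.split('\n')
        if not line.strip().startswith('#')
    )

assert remove_comments_py3('{\n# comment\n"key": 1\n}') == '{"key": 1}'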
31.771186
148
0.654308
507
3,749
4.729783
0.400394
0.050459
0.020017
0.031276
0.174312
0.135113
0.135113
0.135113
0.100917
0.100917
0
0.003913
0.2502
3,749
117
149
32.042735
0.849164
0.145906
0
0.027397
0
0.027397
0.608571
0.099456
0
0
0
0
0
1
0.041096
false
0
0.082192
0
0.178082
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
2dd344951d9aeadc827ae7e0e3e8b87cd96497d0
3,305
py
Python
3. Others/Python_OOP_Passenger_Registration.py
PurveshMakode24/snippets
ba360b363c7b98528d6b7320dcc446a9e3febd4b
[ "MIT" ]
1
2018-12-25T21:10:02.000Z
2018-12-25T21:10:02.000Z
3. Others/Python_OOP_Passenger_Registration.py
PurveshMakode24/snippets
ba360b363c7b98528d6b7320dcc446a9e3febd4b
[ "MIT" ]
2
2019-10-01T16:07:46.000Z
2019-10-01T16:07:47.000Z
3. Others/Python_OOP_Passenger_Registration.py
PurveshMakode24/snippets
ba360b363c7b98528d6b7320dcc446a9e3febd4b
[ "MIT" ]
4
2020-09-01T02:22:44.000Z
2020-10-07T12:14:58.000Z
from random import randint
import re
import json


class Passenger:
    def __init__(self, passengerId, passengerName, email, password, address, contact):
        self.passengerId = passengerId
        self.passengerName = passengerName
        self.email = email
        self.password = password
        self.address = address
        self.contact = contact


class ValidateField:
    def __init__(self, passengerObj):
        self.passengerObj = passengerObj

    def checkPassengerName(self):
        if not "".join(self.passengerObj.passengerName.split()).isalpha():
            print("\nEnter a valid name.")
        elif len(self.passengerObj.passengerName) > 50:
            print("\nName should not exceed 50 characters.")
        else:
            return True

    def checkEmail(self):
        regex = r'^[a-z0-9]+[\._]?[a-z0-9]+[@]\w+[.]\w{2,4}$'
        if not re.match(regex, self.passengerObj.email):
            print("\nPlease enter a valid email.")
        else:
            return True

    def checkPassword(self):
        if not len(self.passengerObj.password) > 7:
            print("\nPassword should be greater than 7.")
        else:
            return True

    def checkAddress(self):
        if len(self.passengerObj.address) > 100:
            print("\nAddress should not exceed 100 characters.")
        elif not len(self.passengerObj.address):
            print("\nAddress should not be empty.")
        else:
            return True

    def checkContact(self):
        if len(str(self.passengerObj.contact)) > 10:
            print("\nContact number should not exceed 10 characters.")
        else:
            return True


def displayPassengers(passengerList):
    if len(passengerList):
        print(json.dumps([p.__dict__ for p in passengerList], indent=4))
    else:
        print("\nNo data found.")


def registration(passengerList):
    try:
        # Bug fix: the literal 0000000 is a syntax error in Python 3;
        # plain 0 is kept as the fallback value.
        passengerId = int(randint(1000000, 9999999) or 0)
        print("\nPassenger ID:", passengerId)
        passengerName = input("Enter the passenger name:")
        email = input("Enter email:")
        password = input("Enter password:")
        address = input("Enter address:").capitalize()
        contact = int(input("Enter contact number:"))
        passengerObj = Passenger(passengerId, passengerName, email, password, address, contact)
        v = ValidateField(passengerObj)
        if v.checkPassengerName() and v.checkEmail() and v.checkPassword() and v.checkAddress() and v.checkContact():
            passengerList.append(passengerObj)
            print("\nPassenger Registration is Successful!")
    except Exception as e:
        print("Error:", e)


if __name__ == '__main__':
    print("=" * 52 + "\nPASSENGER REGISTRATION\n" + "=" * 52)
    print("1. Enter 1 to register a passenger.")
    print("2. Enter 2 to display all the registered passengers.")
    print("3. Enter -1 to exit.\n" + "-" * 52)
    passengerList = []
    while True:
        c = int(input("Enter your choice:") or -1)
        if c == 1:
            registration(passengerList)
        elif c == 2:
            displayPassengers(passengerList)
        else:
            break
        print("-" * 52)
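The email pattern used by checkEmail can be spot-checked on its own (same regex as above, written as a raw string):

import re

regex = r'^[a-z0-9]+[\._]?[a-z0-9]+[@]\w+[.]\w{2,4}$'
print(bool(re.match(regex, 'jane.doe@example.com')))  # -> True
print(bool(re.match(regex, 'not-an-email')))          # -> False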
30.601852
117
0.586082
344
3,305
5.569767
0.337209
0.075157
0.036534
0.044363
0.08142
0.053236
0
0
0
0
0
0.026441
0.301967
3,305
107
118
30.88785
0.804075
0
0
0.151899
0
0.012658
0.186856
0.01272
0.012658
0
0
0
0
1
0.113924
false
0.455696
0.025316
0
0.227848
0.21519
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
2dd39dc7304ad676d501a32885e968d0740bdaf7
1,379
py
Python
software/examples/python/03-multiplexer/main.py
esysberlin/lufo-ifez-datenkonzentrator
f803724b316e2df46e69afab91afa22c64a8d920
[ "MIT" ]
2
2019-02-04T16:04:29.000Z
2019-02-04T23:33:12.000Z
software/examples/python/03-multiplexer/main.py
esysberlin/lufo-ifez-datenkonzentrator
f803724b316e2df46e69afab91afa22c64a8d920
[ "MIT" ]
null
null
null
software/examples/python/03-multiplexer/main.py
esysberlin/lufo-ifez-datenkonzentrator
f803724b316e2df46e69afab91afa22c64a8d920
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # -*- coding: utf-8 -*- import random from common import spi def main(): multiplexer_state = spi.parse_response(spi.query('multiplexer', 'get_outputs')) print("Current multiplexer state:") _print_state(multiplexer_state) print('') for i in range(len(multiplexer_state)): multiplexer_state[i] = _random_input() print('Setting multiplexer to {} ...'.format(multiplexer_state)) print('Response: {}'.format(spi.parse_response(spi.query('multiplexer', 'set_outputs', multiplexer_state)))) print('') multiplexer_state = spi.parse_response(spi.query('multiplexer', 'get_outputs')) print("New state:") _print_state(multiplexer_state) _MULTIPLEXER_INPUTS = ['A1', 'A2', 'A3', 'A4', 'A5', 'A6', 'A7', 'A8', 'B1', 'B2', 'B3', 'B4', 'B5', 'B6', 'B7', 'B8'] _MULTIPLEXER_OUTPUTS = ['OUT_A', 'OUT_B'] def _print_state(spi_response): def print_output_state(name, state): if state == 'NC': print('{} is not connected.'.format(name)) else: print('{} is connected to {}.'.format(name, state)) for i in range(len(spi_response)): print_output_state(_MULTIPLEXER_OUTPUTS[i], spi_response[i]) def _random_input(): return _MULTIPLEXER_INPUTS[int(random.random() * len(_MULTIPLEXER_INPUTS))] if __name__ == '__main__': main()
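Assuming the spi backend responds normally, _print_state would render the queried state along these lines (the connection values below are invented for illustration; only the message formats come from the code):

Current multiplexer state:
OUT_A is connected to A5.
OUT_B is not connected.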
28.729167
88
0.641769
168
1,379
4.964286
0.380952
0.172662
0.100719
0.068345
0.308153
0.20024
0.158273
0.158273
0.158273
0.158273
0
0.0161
0.189268
1,379
47
89
29.340426
0.729875
0.031182
0
0.193548
0
0
0.177661
0
0
0
0
0
0
1
0.129032
false
0
0.064516
0.032258
0.225806
0.419355
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
1
2ddb711319d2de41b35803baad0302688374879f
1,600
py
Python
plots/plottingcompare.py
HPQC-LABS/Quantum-Graph-Spectra
b897d94dd03c48ffec5735b3dc5b86f8c3ab5a8f
[ "MIT" ]
1
2020-07-29T06:42:32.000Z
2020-07-29T06:42:32.000Z
plots/plottingcompare.py
HPQC-LABS/Quantum-Graph-Spectra
b897d94dd03c48ffec5735b3dc5b86f8c3ab5a8f
[ "MIT" ]
null
null
null
plots/plottingcompare.py
HPQC-LABS/Quantum-Graph-Spectra
b897d94dd03c48ffec5735b3dc5b86f8c3ab5a8f
[ "MIT" ]
2
2021-03-29T13:40:47.000Z
2021-03-29T13:41:00.000Z
''' @author: Josh Payne Description: For creating multiple overlaid charts ''' from mpl_toolkits.axes_grid1 import host_subplot import mpl_toolkits.axisartist as AA import matplotlib.pyplot as plt import numpy as np ### Get values from performance.py, input here ### x = [4, 5, 8, 9, 16, 32, 64] lst = [(2.585895228385925, 4.9364027687737175e-08), (13.683554983139038, 0.035110609181115214), (16.250244092941283, 0.17863540370194406), (63.24338710308075, 0.2964172356868715), (81.63710451126099, 0.4288713187054065), (457.574907541275, 1.5497983761583067), (2580.9958889484406, 3.939294261935955)] # otherlist = [0.0023797988891601563, 1.6597139596939088, 1.7730239033699036, 2.4004372358322144, 2.2994803905487062, 1.8459036707878114, 1.3680771589279175] times = [i[0] for i in lst] accuracies = [i[1] for i in lst] host = host_subplot(111, axes_class=AA.Axes) plt.subplots_adjust(right=0.75) par1 = host.twinx() host.set_xlabel("Number of Vertices") host.set_ylabel("Mean Runtime (s)") par1.set_ylabel("Mean Error") p1, = host.plot(x, times, label="Mean Runtime (s)") p2, = par1.plot(x, accuracies, label="Mean Error") par1.set_ylim(-0.4, 3.99) host.legend() host.axis["left"].label.set_color(p1.get_color()) par1.axis["right"].label.set_color(p2.get_color()) plt.draw() plt.show()
30.769231
302
0.71
229
1,600
4.855895
0.510917
0.021583
0.032374
0.016187
0.017986
0
0
0
0
0
0
0.296215
0.14125
1,600
52
303
30.769231
0.5131
0.17
0
0
0
0
0.071483
0
0
0
0
0
0
1
0
false
0
0.133333
0
0.133333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
2ddc43ce9995edf0340e08d59a52ccaf4be2ad42
804
py
Python
pulse2percept/datasets/__init__.py
pulse2percept/pulse2percept
67e0f2354db5ebe306b617f7f78a9ea8c02327ac
[ "BSD-3-Clause" ]
40
2019-11-01T14:09:34.000Z
2022-02-28T19:08:01.000Z
pulse2percept/datasets/__init__.py
jgranley/pulse2percept
65c11393a33d1531cd02a3e38243414bf8172e9a
[ "BSD-3-Clause" ]
277
2019-11-22T03:30:31.000Z
2022-03-28T00:11:03.000Z
pulse2percept/datasets/__init__.py
jgranley/pulse2percept
65c11393a33d1531cd02a3e38243414bf8172e9a
[ "BSD-3-Clause" ]
31
2020-01-22T06:36:36.000Z
2022-01-20T09:54:25.000Z
"""Utilities to download and import datasets. * **Dataset loaders** can be used to load small datasets that come pre-packaged with the pulse2percept software. * **Dataset fetchers** can be used to download larger datasets from a given URL and directly import them into pulse2percept. .. autosummary:: :toctree: _api base horsager2009 beyeler2019 nanduri2012 .. seealso:: * :ref:`Basic Concepts > Datasets <topics-datasets>` """ from .base import clear_data_dir, get_data_dir, fetch_url from .beyeler2019 import fetch_beyeler2019 from .horsager2009 import load_horsager2009 from .nanduri2012 import load_nanduri2012 __all__ = [ 'clear_data_dir', 'fetch_url', 'fetch_beyeler2019', 'get_data_dir', 'load_horsager2009', 'load_nanduri2012', ]
22.333333
76
0.726368
97
804
5.804124
0.494845
0.049734
0.031972
0.039076
0
0
0
0
0
0
0
0.076923
0.191542
804
35
77
22.971429
0.789231
0.569652
0
0
0
0
0.251479
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
2ddf7737c3560fadbc544f9a488716622f8551b0
7,093
py
Python
pipeline/mk_all_level1_fsf_bbr.py
lbconner/openfMRI
265d8ef013dad14fd1741d5817d00f9411d85103
[ "BSD-2-Clause" ]
33
2015-02-17T17:21:43.000Z
2021-08-23T08:27:10.000Z
pipeline/mk_all_level1_fsf_bbr.py
lbconner/openfMRI
265d8ef013dad14fd1741d5817d00f9411d85103
[ "BSD-2-Clause" ]
13
2015-01-14T15:17:09.000Z
2017-07-10T02:17:06.000Z
pipeline/mk_all_level1_fsf_bbr.py
lbconner/openfMRI
265d8ef013dad14fd1741d5817d00f9411d85103
[ "BSD-2-Clause" ]
24
2015-01-27T10:02:47.000Z
2021-03-19T20:05:35.000Z
#!/usr/bin/env python """ mk_all_level1_fsf_bbr.py - make fsf files for all subjects USAGE: python mk_all_level1_fsf_bbr.py <name of dataset> <modelnum> <basedir - default is staged> <nonlinear - default=1> <smoothing - default=0> <tasknum - default to all> """ ## Copyright 2011, Russell Poldrack. All rights reserved. ## Redistribution and use in source and binary forms, with or without modification, are ## permitted provided that the following conditions are met: ## 1. Redistributions of source code must retain the above copyright notice, this list of ## conditions and the following disclaimer. ## 2. Redistributions in binary form must reproduce the above copyright notice, this list ## of conditions and the following disclaimer in the documentation and/or other materials ## provided with the distribution. ## THIS SOFTWARE IS PROVIDED BY RUSSELL POLDRACK ``AS IS'' AND ANY EXPRESS OR IMPLIED ## WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND ## FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL RUSSELL POLDRACK OR ## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR ## CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ## SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ## ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING ## NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import os import glob from mk_level1_fsf_bbr import * from mk_level1_fsf import * import launch_qsub import argparse import sys def usage(): """Print the docstring and exit with error.""" sys.stdout.write(__doc__) sys.exit(2) def parse_command_line(): parser = argparse.ArgumentParser(description='setup_subject') #parser.add_argument('integers', metavar='N', type=int, nargs='+',help='an integer for the accumulator') # set up boolean flags parser.add_argument('--taskid', dest='taskid', required=True,help='Task ID') parser.add_argument('--parenv', dest='parenv', default='2way',help='Parallel environment') parser.add_argument('--anatimg', dest='anatimg', default='',help='Specified anatomy image') parser.add_argument('--tasknum', dest='tasknum',type=int, help='Task number') parser.add_argument('--basedir', dest='basedir', default=os.getcwd(),help='Base directory (above taskid directory)') parser.add_argument('--smoothing', dest='smoothing',type=int, default=0,help='Smoothing (mm FWHM)') parser.add_argument('--noconfound', dest='confound', action='store_false', default=True,help='Omit motion/confound modeling') parser.add_argument('--use_inplane', dest='use_inplane', type=int, default=0,help='Use inplane image') parser.add_argument('--nonlinear', dest='nonlinear', action='store_true', default=False,help='Use nonlinear registration') parser.add_argument('--nobbr', dest='nobbr', action='store_true', default=False,help='Use standard reg instead of BBR') parser.add_argument('--nohpf', dest='hpf', action='store_false', default=True,help='Turn off high pass filtering') parser.add_argument('--nowhiten', dest='whiten', action='store_false', default=True,help='Turn off prewhitening') parser.add_argument('--test', dest='test', action='store_true', default=False,help='Test mode (do not run job)') parser.add_argument('--nolaunch', dest='launch', action='store_false', default=True,help='Do not launch job') parser.add_argument('--modelnum', dest='modelnum',type=int,
default=1,help='Model number') parser.add_argument('--ncores', dest='ncores',type=int, default=0,help='number of cores (ncores * way = 12)') args = parser.parse_args() return args def main(): args=parse_command_line() print args smoothing=args.smoothing use_inplane=args.use_inplane basedir=os.path.abspath(args.basedir) nonlinear=args.nonlinear modelnum=args.modelnum if not args.confound: print 'omitting confound modeling' dataset=args.taskid if not args.test: outfile=open('mk_all_level1_%s.sh'%dataset,'w') tasknum_spec='task*' if args.tasknum is not None: tasknum_spec='task%03d*'%args.tasknum dsdir=os.path.join(basedir,dataset) bolddirs=glob.glob(os.path.join(dsdir,'sub*/BOLD/%s'%tasknum_spec)) print bolddirs for root in bolddirs: #print 'ROOT:',root for m in glob.glob(os.path.join(root,'bold_mcf_brain.nii.gz')): #print 'BOLDFILE:',m f_split=root.split('/') #print f_split scankey='/'+'/'.join(f_split[1:7])+'/scan_key.txt' taskid=f_split[6] subnum=int(f_split[7].lstrip('sub')) taskinfo=f_split[9].split('_') tasknum=int(taskinfo[0].lstrip('task')) runnum=int(taskinfo[1].lstrip('run')) #tr=float(load_scankey(scankey)['TR']) # check for inplane inplane='/'+'/'.join(f_split[1:8])+'/anatomy/inplane001_brain.nii.gz' ## if args.nobbr: ## print 'using nobbr option' ## print 'mk_level1_fsf("%s",%d,%d,%d,%d,%d,"%s",%d)'%(taskid,subnum,tasknum,runnum,smoothing,use_inplane,basedir,modelnum) ## else: ## print 'mk_level1_fsf_bbr("%s",%d,%d,%d,%d,%d,"%s",%d)'%(taskid,subnum,tasknum,runnum,smoothing,use_inplane,basedir,modelnum) if not args.test: if args.nobbr: fname=mk_level1_fsf(taskid,subnum,tasknum,runnum,smoothing,use_inplane,basedir,nonlinear,modelnum) else: fname=mk_level1_fsf_bbr(taskid,subnum,tasknum,runnum,smoothing,use_inplane,basedir,nonlinear,modelnum,args.anatimg,args.confound,args.hpf,args.whiten) #print 'CMD: mk_level1_fsf_bbr(taskid,subnum,tasknum,runnum,smoothing,use_inplane,basedir,nonlinear,modelnum,args.anatimg,args.confound)' outfile.write('feat %s\n'%fname) if not args.test: outfile.close() if not args.test: print 'now launching all feats:' print "find %s/sub*/model/*.fsf |sed 's/^/feat /' > run_all_feats.sh; sh run_all_feats.sh"%args.taskid f=open('mk_all_level1_%s.sh'%dataset) l=f.readlines() f.close() njobs=len(l) if args.parenv=='': args.parenv='6way' way=float(args.parenv.replace('way','')) if args.ncores==0: ncores=(njobs/way)*12.0 else: ncores=args.ncores if args.launch: launch_qsub.launch_qsub(script_name='mk_all_level1_%s.sh'%dataset,runtime='04:00:00',jobname='%sl1'%dataset,email=False,parenv=args.parenv,ncores=ncores) if __name__ == '__main__': main()
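Given the argparse flags defined above, a representative invocation might look like the following; the dataset name and option values are hypothetical:

# python mk_all_level1_fsf_bbr.py --taskid ds001 --basedir /data/staged \
#        --smoothing 6 --nonlinear --modelnum 1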
41.238372
172
0.661638
942
7,093
4.866242
0.312102
0.033377
0.063045
0.027269
0.234948
0.186736
0.161867
0.136126
0.119546
0.119546
0
0.009564
0.204004
7,093
171
173
41.479532
0.802338
0.287889
0
0.059406
0
0.009901
0.221209
0.011241
0.019802
0
0
0
0
0
null
null
0.009901
0.069307
null
null
0.049505
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
2de4ee5bc7351ca57d213412fccfba40eb972d36
1,880
py
Python
tests/python/unittest/test_arith_stmt_simplify.py
ndl/tvm
6e4c6d7a3a840ae1f7f996c856357068ba7c68ee
[ "Apache-2.0" ]
15
2019-05-02T00:06:28.000Z
2022-03-25T03:11:14.000Z
tests/python/unittest/test_arith_stmt_simplify.py
ndl/tvm
6e4c6d7a3a840ae1f7f996c856357068ba7c68ee
[ "Apache-2.0" ]
5
2019-05-13T20:44:51.000Z
2019-09-25T19:56:29.000Z
tests/python/unittest/test_arith_stmt_simplify.py
ndl/tvm
6e4c6d7a3a840ae1f7f996c856357068ba7c68ee
[ "Apache-2.0" ]
5
2019-03-06T19:54:18.000Z
2022-02-01T14:27:58.000Z
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import tvm def test_stmt_simplify(): ib = tvm.ir_builder.create() A = ib.pointer("float32", name="A") C = ib.pointer("float32", name="C") n = tvm.var("n") with ib.for_range(0, n, name="i") as i: with ib.if_scope(i < 12): A[i] = C[i] body = tvm.stmt.LetStmt(n, 10, ib.get()) body = tvm.ir_pass.CanonicalSimplify(body) assert isinstance(body.body, tvm.stmt.Store) def test_thread_extent_simplify(): ib = tvm.ir_builder.create() A = ib.pointer("float32", name="A") C = ib.pointer("float32", name="C") n = tvm.var("n") tx = tvm.thread_axis("threadIdx.x") ty = tvm.thread_axis("threadIdx.y") ib.scope_attr(tx, "thread_extent", n) ib.scope_attr(tx, "thread_extent", n) ib.scope_attr(ty, "thread_extent", 1) with ib.if_scope(tx + ty < 12): A[tx] = C[tx + ty] body = tvm.stmt.LetStmt(n, 10, ib.get()) body = tvm.ir_pass.CanonicalSimplify(body) assert isinstance(body.body.body.body, tvm.stmt.Store) if __name__ == "__main__": test_stmt_simplify() test_thread_extent_simplify()
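Why the asserts hold: in test_stmt_simplify the LetStmt binds n = 10, so the loop index satisfies i < 12 on every iteration and CanonicalSimplify can drop the IfThenElse guard, leaving the Store as the direct loop body. The same reasoning applies in test_thread_extent_simplify, where tx <= 9 and ty <= 0 imply tx + ty < 12; the extra .body hops there step through the thread_extent AttrStmt wrappers. Schematically (pseudo-IR, for illustration only):

# before:  for (i, 0, 10) { if (i < 12) { A[i] = C[i] } }
# after:   for (i, 0, 10) { A[i] = C[i] }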
35.471698
62
0.685106
292
1,880
4.297945
0.386986
0.047809
0.050996
0.063745
0.337052
0.311554
0.311554
0.311554
0.311554
0.311554
0
0.014483
0.192021
1,880
52
63
36.153846
0.811718
0.4
0
0.466667
0
0
0.093609
0
0
0
0
0
0.066667
1
0.066667
false
0.066667
0.033333
0
0.1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
2de89c57ba2b6a38a0def3e77a44733886f67fe3
378
py
Python
Day02/part2.py
JavierRizzoA/AoC2021
948177b135f2570090cd3a13aafaa0199566248a
[ "Beerware" ]
null
null
null
Day02/part2.py
JavierRizzoA/AoC2021
948177b135f2570090cd3a13aafaa0199566248a
[ "Beerware" ]
null
null
null
Day02/part2.py
JavierRizzoA/AoC2021
948177b135f2570090cd3a13aafaa0199566248a
[ "Beerware" ]
null
null
null
x = 0 y = 0 aim = 0 with open('input') as f: for line in f: direction = line.split()[0] magnitude = int(line.split()[1]) if direction == 'forward': x += magnitude y += aim * magnitude elif direction == 'down': aim += magnitude elif direction == 'up': aim -= magnitude print(str(x * y))
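As a sanity check, the puzzle's published sample input reproduces the expected part-two answer; the walk-through below uses the well-known AoC 2021 Day 2 example commands:

# forward 5, down 5, forward 8, up 3, down 8, forward 2
# aim after each command: 0, 5, 5, 2, 10, 10
# x = 5 + 8 + 2 = 15, y = 0*5 + 5*8 + 10*2 = 60, so x * y = 900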
23.625
40
0.481481
46
378
3.956522
0.521739
0.197802
0.175824
0.274725
0
0
0
0
0
0
0
0.021368
0.380952
378
15
41
25.2
0.75641
0
0
0
0
0
0.047619
0
0
0
0
0
0
1
0
false
0
0
0
0
0.066667
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
2dea338f874c3ce26ae59e17c28ac999a26659a9
43,498
py
Python
ec2driver.py
venumurthy/ec2-driver
65b0482af3af80e34c00c327f54487d492933b25
[ "Apache-2.0" ]
null
null
null
ec2driver.py
venumurthy/ec2-driver
65b0482af3af80e34c00c327f54487d492933b25
[ "Apache-2.0" ]
null
null
null
ec2driver.py
venumurthy/ec2-driver
65b0482af3af80e34c00c327f54487d492933b25
[ "Apache-2.0" ]
null
null
null
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright (c) 2014 Thoughtworks. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either expressed or implied. See the # License for the specific language governing permissions and limitations # under the License. """Connection to the Amazon Web Services - EC2 service""" from threading import Lock import base64 import time from boto import ec2 import boto.ec2.cloudwatch from boto import exception as boto_exc from boto.exception import EC2ResponseError from boto.regioninfo import RegionInfo from oslo.config import cfg from novaclient.v1_1 import client from ec2_rule_service import EC2RuleService from ec2_rule_transformer import EC2RuleTransformer from ec2driver_config import * from nova.i18n import _ from nova import block_device from nova.compute import power_state from nova.compute import task_states from nova import db from nova import exception from nova.image import glance from nova.openstack.common import log as logging from nova.openstack.common import loopingcall from nova.virt import driver from nova.virt import virtapi from credentials import get_nova_creds from instance_rule_refresher import InstanceRuleRefresher from openstack_group_service import OpenstackGroupService from openstack_rule_service import OpenstackRuleService from openstack_rule_transformer import OpenstackRuleTransformer import rule_comparator from group_rule_refresher import GroupRuleRefresher LOG = logging.getLogger(__name__) ec2driver_opts = [ cfg.StrOpt('snapshot_image_format', help='Snapshot image format (valid options are : ' 'raw, qcow2, vmdk, vdi). ' 'Defaults to same as source image'), cfg.StrOpt('datastore_regex', help='Regex to match the name of a datastore.'), cfg.FloatOpt('task_poll_interval', default=0.5, help='The interval used for polling of remote tasks.'), cfg.IntOpt('api_retry_count', default=10, help='The number of times we retry on failures, e.g., ' 'socket error, etc.'), cfg.IntOpt('vnc_port', default=5900, help='VNC starting port'), cfg.IntOpt('vnc_port_total', default=10000, help='Total number of VNC ports'), cfg.BoolOpt('use_linked_clone', default=True, help='Whether to use linked clone'), cfg.StrOpt('ec2_secret_access_key', help='The secret access key of the Amazon Web Services account'), cfg.StrOpt('ec2_access_key_id', help='The access key ID of the Amazon Web Services account'), ] CONF = cfg.CONF CONF.register_opts(ec2driver_opts, 'ec2driver') CONF.import_opt('my_ip', 'nova.netconf') TIME_BETWEEN_API_CALL_RETRIES = 1.0 EC2_STATE_MAP = { "pending": power_state.BUILDING, "running": power_state.RUNNING, "shutting-down": power_state.NOSTATE, "terminated": power_state.SHUTDOWN, "stopping": power_state.NOSTATE, "stopped": power_state.SHUTDOWN } DIAGNOSTIC_KEYS_TO_FILTER = ['group', 'block_device_mapping'] def set_nodes(nodes): """Sets EC2Driver's node.list. It has effect on the following methods: get_available_nodes() get_available_resource get_host_stats() To restore the change, call restore_nodes() """ global _EC2_NODES _EC2_NODES = nodes def restore_nodes(): """Resets EC2Driver's node list modified by set_nodes(). Usually called from tearDown(). 
""" global _EC2_NODES _EC2_NODES = [CONF.host] class EC2Driver(driver.ComputeDriver): capabilities = { "has_imagecache": True, "supports_recreate": True, } """EC2 hypervisor driver. Respurposing for EC2""" def __init__(self, virtapi, read_only=False): super(EC2Driver, self).__init__(virtapi) self.host_status_base = { 'vcpus': VCPUS, 'memory_mb': MEMORY_IN_MBS, 'local_gb': DISK_IN_GB, 'vcpus_used': 0, 'memory_mb_used': 0, 'local_gb_used': 100000000000, 'hypervisor_type': 'EC2', 'hypervisor_version': '1.0', 'hypervisor_hostname': CONF.host, 'cpu_info': {}, 'disk_available_least': 500000000000, } self._mounts = {} self._interfaces = {} self.creds = get_nova_creds() self.nova = client.Client(**self.creds) region = RegionInfo(name=aws_region, endpoint=aws_endpoint) self.ec2_conn = ec2.EC2Connection(aws_access_key_id=CONF.ec2driver.ec2_access_key_id, aws_secret_access_key=CONF.ec2driver.ec2_secret_access_key, host=host, port=port, region=region, is_secure=secure) self.cloudwatch_conn = ec2.cloudwatch.connect_to_region( aws_region, aws_access_key_id=CONF.ec2driver.ec2_access_key_id, aws_secret_access_key=CONF.ec2driver.ec2_secret_access_key) self.security_group_lock = Lock() self.instance_rule_refresher = InstanceRuleRefresher( GroupRuleRefresher( ec2_connection=self.ec2_conn, openstack_rule_service=OpenstackRuleService( group_service=OpenstackGroupService(self.nova.security_groups), openstack_rule_transformer=OpenstackRuleTransformer() ), ec2_rule_service=EC2RuleService( ec2_connection=self.ec2_conn, ec2_rule_transformer=EC2RuleTransformer(self.ec2_conn) ) ) ) if not '_EC2_NODES' in globals(): set_nodes([CONF.host]) def init_host(self, host): """Initialize anything that is necessary for the driver to function, including catching up with currently running VM's on the given host. """ return def list_instances(self): """Return the names of all the instances known to the virtualization layer, as a list. """ all_instances = self.ec2_conn.get_all_instances() instance_ids = [] for instance in all_instances: instance_ids.append(instance.id) return instance_ids def plug_vifs(self, instance, network_info): """Plug VIFs into networks.""" pass def unplug_vifs(self, instance, network_info): """Unplug VIFs from networks.""" pass def spawn(self, context, instance, image_meta, injected_files, admin_password, network_info=None, block_device_info=None): """Create a new instance/VM/domain on the virtualization platform. Once this successfully completes, the instance should be running (power_state.RUNNING). If this fails, any partial instance should be completely cleaned up, and the virtualization platform should be in the state that it was before this call began. :param context: security context <Not Yet Implemented> :param instance: nova.objects.instance.Instance This function should use the data there to guide the creation of the new instance. :param image_meta: image object returned by nova.image.glance that defines the image from which to boot this instance :param injected_files: User files to inject into instance. :param admin_password: set in instance. <Not Yet Implemented> :param network_info: :py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info` :param block_device_info: Information about block devices to be attached to the instance. 
""" LOG.info("***** Calling SPAWN *******************") LOG.info("****** %s" % instance._user_data) LOG.info("****** Allocating an elastic IP *********") elastic_ip_address = self.ec2_conn.allocate_address(domain='vpc') #Creating the EC2 instance flavor_type = flavor_map[instance.get_flavor().id] #passing user_data from the openstack instance which is Base64 encoded after decoding it. user_data = instance._user_data if user_data: user_data = base64.b64decode(user_data) reservation = self.ec2_conn.run_instances(aws_ami, instance_type=flavor_type, subnet_id=ec2_subnet_id, user_data=user_data) ec2_instance = reservation.instances ec2_id = ec2_instance[0].id self._wait_for_state(instance, ec2_id, "running", power_state.RUNNING) instance['metadata'].update({'ec2_id': ec2_id, 'public_ip_address': elastic_ip_address.public_ip}) LOG.info("****** Associating the elastic IP to the instance *********") self.ec2_conn.associate_address(instance_id=ec2_id, allocation_id=elastic_ip_address.allocation_id) def snapshot(self, context, instance, image_id, update_task_state): """Snapshot an image of the specified instance on EC2 and create an Image which gets stored in AMI (internally in EBS Snapshot) :param context: security context :param instance: nova.objects.instance.Instance :param image_id: Reference to a pre-created image that will hold the snapshot. """ LOG.info("***** Calling SNAPSHOT *******************") if instance['metadata']['ec2_id'] is None: raise exception.InstanceNotRunning(instance_id=instance['uuid']) # Adding the below line only alters the state of the instance and not # its image in OpenStack. update_task_state( task_state=task_states.IMAGE_UPLOADING, expected_state=task_states.IMAGE_SNAPSHOT) ec2_id = instance['metadata']['ec2_id'] ec_instance_info = self.ec2_conn.get_only_instances( instance_ids=[ec2_id], filters=None, dry_run=False, max_results=None) ec2_instance = ec_instance_info[0] if ec2_instance.state == 'running': ec2_image_id = ec2_instance.create_image(name=str( image_id), description="Image from OpenStack", no_reboot=False, dry_run=False) LOG.info("Image has been created state to %s." % ec2_image_id) # The instance will be in pending state when it comes up, waiting forit to be in available self._wait_for_image_state(ec2_image_id, "available") image_api = glance.get_default_image_service() image_ref = glance.generate_image_url(image_id) metadata = {'is_public': False, 'location': image_ref, 'properties': { 'kernel_id': instance['kernel_id'], 'image_state': 'available', 'owner_id': instance['project_id'], 'ramdisk_id': instance['ramdisk_id'], 'ec2_image_id': ec2_image_id } } image_api.update(context, image_id, metadata) def reboot(self, context, instance, network_info, reboot_type, block_device_info=None, bad_volumes_callback=None): """Reboot the specified instance. After this is called successfully, the instance's state goes back to power_state.RUNNING. The virtualization platform should ensure that the reboot action has completed successfully even in cases in which the underlying domain/vm is paused or halted/stopped. 
:param instance: nova.objects.instance.Instance :param network_info: :py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info` :param reboot_type: Either a HARD or SOFT reboot :param block_device_info: Info pertaining to attached volumes :param bad_volumes_callback: Function to handle any bad volumes encountered """ if reboot_type == 'SOFT': self._soft_reboot( context, instance, network_info, block_device_info) elif reboot_type == 'HARD': self._hard_reboot( context, instance, network_info, block_device_info) def _soft_reboot(self, context, instance, network_info, block_device_info=None): LOG.info("***** Calling SOFT REBOOT *******************") ec2_id = instance['metadata']['ec2_id'] self.ec2_conn.reboot_instances(instance_ids=[ec2_id], dry_run=False) LOG.info("Soft Reboot Complete.") def _hard_reboot(self, context, instance, network_info, block_device_info=None): LOG.info("***** Calling HARD REBOOT *******************") self.power_off(instance) self.power_on(context, instance, network_info, block_device_info) LOG.info("Hard Reboot Complete.") @staticmethod def get_host_ip_addr(): """Retrieves the IP address of the dom0 """ LOG.info("***** Calling get_host_ip_addr *******************") return CONF.my_ip def set_admin_password(self, instance, new_pass): """Boto doesn't support setting the password at the time of creating an instance, hence this is not implemented. """ pass def inject_file(self, instance, b64_path, b64_contents): pass def resume_state_on_host_boot(self, context, instance, network_info, block_device_info=None): pass def rescue(self, context, instance, network_info, image_meta, rescue_password): pass def unrescue(self, instance, network_info): pass def poll_rebooting_instances(self, timeout, instances): pass def migrate_disk_and_power_off(self, context, instance, dest, instance_type, network_info, block_device_info=None): pass def finish_revert_migration(self, context, instance, network_info, block_device_info=None, power_on=True): pass def post_live_migration_at_destination(self, context, instance, network_info, block_migration=False, block_device_info=None): pass def power_off(self, instance): """Power off the specified instance. """ LOG.info("***** Calling POWER OFF *******************") ec2_id = instance['metadata']['ec2_id'] self.ec2_conn.stop_instances( instance_ids=[ec2_id], force=False, dry_run=False) self._wait_for_state(instance, ec2_id, "stopped", power_state.SHUTDOWN) def power_on(self, context, instance, network_info, block_device_info): """Power on the specified instance. """ LOG.info("***** Calling POWER ON *******************") ec2_id = instance['metadata']['ec2_id'] self.ec2_conn.start_instances(instance_ids=[ec2_id], dry_run=False) self._wait_for_state(instance, ec2_id, "running", power_state.RUNNING) def soft_delete(self, instance): """Delete the specified instance. """ self.destroy(instance) def restore(self, instance): pass def pause(self, instance): """Boto doesn't support pause and cannot save system state, hence we've implemented the closest functionality, which is to power off the instance. :param instance: nova.objects.instance.Instance """ self.power_off(instance) def unpause(self, instance): """Since Boto doesn't support pause and cannot save system state, we've implemented the closest functionality, which is to power off the instance; this method powers such an instance back on.
:param instance: nova.objects.instance.Instance """ self.power_on( context=None, instance=instance, network_info=None, block_device_info=None) def suspend(self, instance): """Boto doesn't support suspend and cannot save system state, hence we've implemented the closest functionality, which is to power off the instance. :param instance: nova.objects.instance.Instance """ self.power_off(instance) def resume(self, context, instance, network_info, block_device_info=None): """Since Boto doesn't support suspend and we cannot save system state, we've implemented the closest functionality, which is to power on the instance. :param instance: nova.objects.instance.Instance """ self.power_on(context, instance, network_info, block_device_info) def destroy(self, context, instance, network_info, block_device_info=None, destroy_disks=True, migrate_data=None): """Destroy the specified instance from the hypervisor. If the instance is not found (for example if networking failed), this function should still succeed. It's probably a good idea to log a warning in that case. :param context: security context :param instance: Instance object as returned by DB layer. :param network_info: :py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info` :param block_device_info: Information about block devices that should be detached from the instance. :param destroy_disks: Indicates if disks should be destroyed :param migrate_data: implementation specific params """ LOG.info("***** Calling DESTROY *******************") if 'ec2_id' not in instance['metadata']: LOG.warning(_("Key '%s' not in EC2 instances") % instance['name'], instance=instance) return elif 'public_ip' not in instance['metadata'] and 'public_ip_address' not in instance['metadata']: print instance['metadata'] LOG.warning(_("Public IP is null"), instance=instance) return else: # Deleting the instance from EC2 ec2_id = instance['metadata']['ec2_id'] try: ec2_instances = self.ec2_conn.get_only_instances(instance_ids=[ec2_id]) except Exception: return if len(ec2_instances) == 0: LOG.warning(_("EC2 instance with ID %s not found") % ec2_id, instance=instance) return else: # get the elastic ip associated with the instance & disassociate # it, and release it elastic_ip_address = \ self.ec2_conn.get_all_addresses(addresses=instance['metadata']['public_ip_address'])[0] LOG.info("****** Disassociating the elastic IP *********") self.ec2_conn.disassociate_address(elastic_ip_address.public_ip) self.ec2_conn.stop_instances(instance_ids=[ec2_id], force=True) self.ec2_conn.terminate_instances(instance_ids=[ec2_id]) self._wait_for_state(instance, ec2_id, "terminated", power_state.SHUTDOWN) LOG.info("****** Releasing the elastic IP ************") self.ec2_conn.release_address(allocation_id=elastic_ip_address.allocation_id) def attach_volume(self, context, connection_info, instance, mountpoint, disk_bus=None, device_type=None, encryption=None): """Attach the disk to the instance at mountpoint using info. """ instance_name = instance['name'] if instance_name not in self._mounts: self._mounts[instance_name] = {} self._mounts[instance_name][mountpoint] = connection_info volume_id = connection_info['data']['volume_id'] # ec2 only attaches volumes at /dev/sdf through /dev/sdp self.ec2_conn.attach_volume(volume_map[volume_id], instance['metadata']['ec2_id'], "/dev/sdn", dry_run=False) def detach_volume(self, connection_info, instance, mountpoint, encryption=None): """Detach the disk attached to the instance.
""" try: del self._mounts[instance['name']][mountpoint] except KeyError: pass volume_id = connection_info['data']['volume_id'] self.ec2_conn.detach_volume(volume_map[volume_id], instance_id=instance['metadata']['ec2_id'], device="/dev/sdn", force=False, dry_run=False) def swap_volume(self, old_connection_info, new_connection_info, instance, mountpoint): """Replace the disk attached to the instance. """ instance_name = instance['name'] if instance_name not in self._mounts: self._mounts[instance_name] = {} self._mounts[instance_name][mountpoint] = new_connection_info old_volume_id = old_connection_info['data']['volume_id'] new_volume_id = new_connection_info['data']['volume_id'] self.detach_volume(old_connection_info, instance, mountpoint) # wait for the old volume to detach successfully to make sure # /dev/sdn is available for the new volume to be attached time.sleep(60) self.ec2_conn.attach_volume(volume_map[new_volume_id], instance['metadata']['ec2_id'], "/dev/sdn", dry_run=False) return True def attach_interface(self, instance, image_meta, vif): if vif['id'] in self._interfaces: raise exception.InterfaceAttachFailed('duplicate') self._interfaces[vif['id']] = vif def detach_interface(self, instance, vif): try: del self._interfaces[vif['id']] except KeyError: raise exception.InterfaceDetachFailed('not attached') def get_info(self, instance): """Get the current status of an instance, by name (not ID!) :param instance: nova.objects.instance.Instance object Returns a dict containing: :state: the running state, one of the power_state codes :max_mem: (int) the maximum memory in KBytes allowed :mem: (int) the memory in KBytes used by the domain :num_cpu: (int) the number of virtual CPUs for the domain :cpu_time: (int) the CPU time used in nanoseconds """ LOG.info("*************** GET INFO ********************") if 'metadata' not in instance or 'ec2_id' not in instance['metadata']: raise exception.InstanceNotFound(instance_id=instance['name']) ec2_id = instance['metadata']['ec2_id'] ec2_instances = self.ec2_conn.get_only_instances(instance_ids=[ec2_id], filters=None, dry_run=False, max_results=None) if ec2_instances.__len__() == 0: LOG.warning(_("EC2 instance with ID %s not found") % ec2_id, instance=instance) raise exception.InstanceNotFound(instance_id=instance['name']) ec2_instance = ec2_instances[0] LOG.info(ec2_instance) LOG.info("state %s max_mem %s mem %s flavor %s" % (EC2_STATE_MAP.get(ec2_instance.state), ec2_instance.ramdisk, ec2_instance.get_attribute('ramdisk', dry_run=False), ec2_instance.instance_type)) return {'state': EC2_STATE_MAP.get(ec2_instance.state), 'max_mem': ec2_instance.ramdisk, 'mem': ec2_instance.get_attribute('ramdisk', dry_run=False), 'num_cpu': 2, 'cpu_time': 0} def allow_key(self, key): for key_to_filter in DIAGNOSTIC_KEYS_TO_FILTER: if key == key_to_filter: return False return True def get_diagnostics(self, instance_name): """Return data about VM diagnostics. """ LOG.info("******* GET DIAGNOSTICS *********************************************") instance = self.nova.servers.get(instance_name) ec2_id = instance.metadata['ec2_id'] ec2_instances = self.ec2_conn.get_only_instances(instance_ids=[ec2_id], filters=None, dry_run=False, max_results=None) if ec2_instances.__len__() == 0: LOG.warning(_("EC2 instance with ID %s not found") % ec2_id, instance=instance) raise exception.InstanceNotFound(instance_id=instance['name']) ec2_instance = ec2_instances[0] diagnostics = {} for key, value in ec2_instance.__dict__.items(): if self.allow_key(key): diagnostics['instance.' 
+ key] = str(value) metrics = self.cloudwatch_conn.list_metrics(dimensions={'InstanceId': ec2_id}) import datetime for metric in metrics: end = datetime.datetime.utcnow() start = end - datetime.timedelta(hours=1) details = metric.query(start, end, 'Average', None, 3600) if len(details) > 0: diagnostics['metrics.' + str(metric)] = details[0] return diagnostics def get_all_bw_counters(self, instances): """Return bandwidth usage counters for each interface on each running VM. """ bw = [] return bw def get_all_volume_usage(self, context, compute_host_bdms): """Return usage info for volumes attached to vms on a given host. """ volusage = [] return volusage def block_stats(self, instance_name, disk_id): return [0L, 0L, 0L, 0L, None] def interface_stats(self, instance_name, iface_id): return [0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L] def get_console_output(self, instance): return 'EC2 CONSOLE OUTPUT\nANOTHER\nLAST LINE' def get_vnc_console(self, instance): return {'internal_access_path': 'EC2', 'host': 'EC2vncconsole.com', 'port': 6969} def get_spice_console(self, instance): return {'internal_access_path': 'EC2', 'host': 'EC2spiceconsole.com', 'port': 6969, 'tlsPort': 6970} def get_console_pool_info(self, console_type): return {'address': '127.0.0.1', 'username': 'EC2user', 'password': 'EC2password'} def _get_ec2_instance_ids_with_security_group(self, ec2_security_group): return [instance.id for instance in ec2_security_group.instances()] def _get_openstack_instances_with_security_group(self, openstack_security_group): return [instance for instance in (self.nova.servers.list()) if openstack_security_group.name in [group['name'] for group in instance.security_groups]] def _get_id_of_ec2_instance_to_update_security_group(self, ec2_instance_ids_for_security_group, ec2_ids_for_openstack_instances_for_security_group): return (set(ec2_ids_for_openstack_instances_for_security_group).symmetric_difference( set(ec2_instance_ids_for_security_group))).pop() def _should_add_security_group_to_instance(self, ec2_instance_ids_for_security_group, ec2_ids_for_openstack_instances_for_security_group): return len(ec2_instance_ids_for_security_group) < len(ec2_ids_for_openstack_instances_for_security_group) def _add_security_group_to_instance(self, ec2_instance_id, ec2_security_group): security_group_ids_for_instance = self._get_ec2_security_group_ids_for_instance(ec2_instance_id) security_group_ids_for_instance.append(ec2_security_group.id) self.ec2_conn.modify_instance_attribute(ec2_instance_id, "groupSet", security_group_ids_for_instance) def _remove_security_group_from_instance(self, ec2_instance_id, ec2_security_group): security_group_ids_for_instance = self._get_ec2_security_group_ids_for_instance(ec2_instance_id) security_group_ids_for_instance.remove(ec2_security_group.id) self.ec2_conn.modify_instance_attribute(ec2_instance_id, "groupSet", security_group_ids_for_instance) def _get_ec2_security_group_ids_for_instance(self, ec2_instance_id): security_groups_for_instance = self.ec2_conn.get_instance_attribute(ec2_instance_id, "groupSet")['groupSet'] security_group_ids_for_instance = [group.id for group in security_groups_for_instance] return security_group_ids_for_instance def _get_or_create_ec2_security_group(self, openstack_security_group): try: return self.ec2_conn.get_all_security_groups(openstack_security_group.name)[0] except (EC2ResponseError, IndexError) as e: LOG.warning(e) return self.ec2_conn.create_security_group(openstack_security_group.name, openstack_security_group.description) def 
refresh_security_group_rules(self, security_group_id): """This method is called after a change to security groups. All security groups and their associated rules live in the datastore, and calling this method should apply the updated rules to instances running the specified security group. An error should be raised if the operation cannot complete. """ LOG.info("************** REFRESH SECURITY GROUP RULES ******************") openstack_security_group = self.nova.security_groups.get(security_group_id) ec2_security_group = self._get_or_create_ec2_security_group(openstack_security_group) ec2_ids_for_ec2_instances_with_security_group = self._get_ec2_instance_ids_with_security_group( ec2_security_group) ec2_ids_for_openstack_instances_with_security_group = [ instance.metadata['ec2_id'] for instance in self._get_openstack_instances_with_security_group(openstack_security_group) ] self.security_group_lock.acquire() try: ec2_instance_to_update = self._get_id_of_ec2_instance_to_update_security_group( ec2_ids_for_ec2_instances_with_security_group, ec2_ids_for_openstack_instances_with_security_group ) should_add_security_group = self._should_add_security_group_to_instance( ec2_ids_for_ec2_instances_with_security_group, ec2_ids_for_openstack_instances_with_security_group) if should_add_security_group: self._add_security_group_to_instance(ec2_instance_to_update, ec2_security_group) else: self._remove_security_group_from_instance(ec2_instance_to_update, ec2_security_group) finally: self.security_group_lock.release() return True def refresh_security_group_members(self, security_group_id): LOG.info("************** REFRESH SECURITY GROUP MEMBERS ******************") LOG.info(security_group_id) return True def _get_allowed_group_name_from_openstack_rule_if_present(self, openstack_rule): return openstack_rule['group']['name'] if 'name' in openstack_rule['group'] else None def _get_allowed_ip_range_from_openstack_rule_if_present(self, openstack_rule): return openstack_rule['ip_range']['cidr'] if 'cidr' in openstack_rule['ip_range'] else None def refresh_instance_security_rules(self, instance): LOG.info("************** REFRESH INSTANCE SECURITY RULES ******************") LOG.info(instance) # TODO: lock for case when group is associated with multiple instances [Cameron & Ed] self.instance_rule_refresher.refresh(self.nova.servers.get(instance['id'])) return def refresh_provider_fw_rules(self): pass def get_available_resource(self, nodename): """Retrieve resource information. Updates compute manager resource info on ComputeNode table. This method is called when nova-compute launches and as part of a periodic task that records results in the DB. Since we don't have a real hypervisor, pretend we have lots of disk and ram.
:param nodename: node from which the caller wants to get resources; a driver that manages only one node can safely ignore this :returns: Dictionary describing resources """ LOG.info("************** GET_AVAILABLE_RESOURCE ******************") if nodename not in _EC2_NODES: return {} dic = {'vcpus': VCPUS, 'memory_mb': MEMORY_IN_MBS, 'local_gb': DISK_IN_GB, 'vcpus_used': 0, 'memory_mb_used': 0, 'local_gb_used': 0, 'hypervisor_type': 'EC2', 'hypervisor_version': '1.0', 'hypervisor_hostname': nodename, 'disk_available_least': 0, 'cpu_info': '?'} return dic def ensure_filtering_rules_for_instance(self, instance_ref, network_info): return def get_instance_disk_info(self, instance_name): return def live_migration(self, context, instance_ref, dest, post_method, recover_method, block_migration=False, migrate_data=None): post_method(context, instance_ref, dest, block_migration, migrate_data) return def check_can_live_migrate_destination_cleanup(self, ctxt, dest_check_data): return def check_can_live_migrate_destination(self, ctxt, instance_ref, src_compute_info, dst_compute_info, block_migration=False, disk_over_commit=False): return {} def check_can_live_migrate_source(self, ctxt, instance_ref, dest_check_data): return def finish_migration(self, context, migration, instance, disk_info, network_info, image_meta, resize_instance, block_device_info=None, power_on=True): """Completes a resize :param migration: the migrate/resize information :param instance: nova.objects.instance.Instance being migrated/resized :param power_on: if True the instance should be powered on """ LOG.info("***** Calling FINISH MIGRATION *******************") ec2_id = instance['metadata']['ec2_id'] ec_instance_info = self.ec2_conn.get_only_instances( instance_ids=[ec2_id], filters=None, dry_run=False, max_results=None) ec2_instance = ec_instance_info[0] # The EC2 instance needs to be stopped to modify its attributes. So we stop the instance, # modify the instance type in this case, and then restart the instance. ec2_instance.stop() self._wait_for_state(instance, ec2_id, "stopped", power_state.SHUTDOWN) new_instance_type = flavor_map[migration['new_instance_type_id']] ec2_instance.modify_attribute('instanceType', new_instance_type) def confirm_migration(self, migration, instance, network_info): """Confirms a resize, destroying the source VM.
:param instance: nova.objects.instance.Instance """ LOG.info("***** Calling CONFIRM MIGRATION *******************") ec2_id = instance['metadata']['ec2_id'] ec_instance_info = self.ec2_conn.get_only_instances( instance_ids=[ec2_id], filters=None, dry_run=False, max_results=None) ec2_instance = ec_instance_info[0] ec2_instance.start() self._wait_for_state(instance, ec2_id, "running", power_state.RUNNING) def pre_live_migration(self, context, instance_ref, block_device_info, network_info, disk, migrate_data=None): return def unfilter_instance(self, instance_ref, network_info): return def get_host_stats(self, refresh=False): """Return EC2 Host Status of name, ram, disk, network.""" stats = [] for nodename in _EC2_NODES: host_status = self.host_status_base.copy() host_status['hypervisor_hostname'] = nodename host_status['host_hostname'] = nodename host_status['host_name_label'] = nodename host_status['hypervisor_type'] = 'Amazon-EC2' host_status['vcpus'] = VCPUS host_status['memory_mb'] = MEMORY_IN_MBS host_status['local_gb'] = DISK_IN_GB stats.append(host_status) if len(stats) == 0: raise exception.NovaException("EC2Driver has no node") elif len(stats) == 1: return stats[0] else: return stats def host_power_action(self, host, action): """Reboots, shuts down or powers up the host.""" return action def host_maintenance_mode(self, host, mode): """Start/Stop host maintenance window. On start, it triggers guest VMs evacuation. """ if not mode: return 'off_maintenance' return 'on_maintenance' def set_host_enabled(self, host, enabled): """Sets the specified host's ability to accept new instances.""" if enabled: return 'enabled' return 'disabled' def get_disk_available_least(self): pass def add_to_aggregate(self, context, aggregate, host, **kwargs): pass def remove_from_aggregate(self, context, aggregate, host, **kwargs): pass def get_volume_connector(self, instance): return {'ip': '127.0.0.1', 'initiator': 'EC2', 'host': 'EC2host'} def get_available_nodes(self, refresh=False): return _EC2_NODES def instance_on_disk(self, instance): return False def list_instance_uuids(self): return [] def _wait_for_state(self, instance, ec2_id, desired_state, desired_power_state): """Wait for the corresponding ec2 instance to reach a completely available state. :params:ec2_id: the instance's corresponding ec2 id. :params:desired_state: the desired state of the instance to be in. """ def _wait_for_power_state(): """Called at an interval until the VM is running again. """ ec2_instance = self.ec2_conn.get_only_instances(instance_ids=[ec2_id]) state = ec2_instance[0].state if state == desired_state: LOG.info("Instance has changed state to %s."
% desired_state) raise loopingcall.LoopingCallDone() def _wait_for_status_check(): """The power state of a machine might be ON, but the status check is what reflects its real availability. """ ec2_instance = self.ec2_conn.get_all_instance_status(instance_ids=[ec2_id])[0] if ec2_instance.system_status.status == 'ok': LOG.info("Instance status check is %s / %s" % (ec2_instance.system_status.status, ec2_instance.instance_status.status)) raise loopingcall.LoopingCallDone() # waiting for the power state to change timer = loopingcall.FixedIntervalLoopingCall(_wait_for_power_state) timer.start(interval=1).wait() # waiting for the status checks when the desired state is 'running' if desired_state == 'running': timer = loopingcall.FixedIntervalLoopingCall(_wait_for_status_check) timer.start(interval=0.5).wait() def _wait_for_image_state(self, ami_id, desired_state): """Timer to wait for the image/snapshot to reach a desired state :params:ami_id: corresponding image id in Amazon :params:desired_state: the desired new state of the image to be in. """ def _wait_for_state(): """Called at an interval until the AMI image is available.""" try: images = self.ec2_conn.get_all_images(image_ids=[ami_id], owners=None, executable_by=None, filters=None, dry_run=None) state = images[0].state # LOG.info("\n\n\nImage id = %s" % ami_id + ", state = %s\n\n\n" % state) if state == desired_state: LOG.info("Image has changed state to %s." % desired_state) raise loopingcall.LoopingCallDone() except boto_exc.EC2ResponseError: pass timer = loopingcall.FixedIntervalLoopingCall(_wait_for_state) timer.start(interval=0.5).wait() class EC2VirtAPI(virtapi.VirtAPI): def instance_update(self, context, instance_uuid, updates): return db.instance_update_and_get_original(context, instance_uuid, updates) def aggregate_get_by_host(self, context, host, key=None): return db.aggregate_get_by_host(context, host, key=key) def aggregate_metadata_add(self, context, aggregate, metadata, set_delete=False): return db.aggregate_metadata_add(context, aggregate['id'], metadata, set_delete=set_delete) def aggregate_metadata_delete(self, context, aggregate, key): return db.aggregate_metadata_delete(context, aggregate['id'], key) def security_group_get_by_instance(self, context, instance): return db.security_group_get_by_instance(context, instance['uuid']) def security_group_rule_get_by_security_group(self, context, security_group): return db.security_group_rule_get_by_security_group( context, security_group['id']) def provider_fw_rule_get_all(self, context): return db.provider_fw_rule_get_all(context) def agent_build_get_by_triple(self, context, hypervisor, os, architecture): return db.agent_build_get_by_triple(context, hypervisor, os, architecture) def instance_type_get(self, context, instance_type_id): return db.instance_type_get(context, instance_type_id) def block_device_mapping_get_all_by_instance(self, context, instance, legacy=True): bdms = db.block_device_mapping_get_all_by_instance(context, instance['uuid']) if legacy: bdms = block_device.legacy_mapping(bdms) return bdms def block_device_mapping_update(self, context, bdm_id, values): return db.block_device_mapping_update(context, bdm_id, values)
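Stripped of the nova/boto specifics, the _wait_for_state helpers above follow a generic poll-until-predicate pattern built on FixedIntervalLoopingCall; a plain-Python sketch of the same idea (the names and timeout policy here are hypothetical, not part of the driver):

import time

def wait_for(predicate, interval=1.0, timeout=300.0):
    """Poll predicate() every `interval` seconds until it returns True or we time out."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        if predicate():
            return True
        time.sleep(interval)
    raise RuntimeError('timed out waiting for condition')

# e.g. wait_for(lambda: get_instance_state() == 'running')  # get_instance_state is hypothetical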
43.324701
161
0.635271
5,197
43,498
5.038291
0.141235
0.040215
0.013863
0.011228
0.377253
0.322563
0.268026
0.234571
0.200619
0.173579
0
0.012589
0.273162
43,498
1,003
162
43.367896
0.8156
0.036829
0
0.224138
0
0
0.107464
0.003848
0
0
0
0.000997
0
0
null
null
0.034483
0.051724
null
null
0.001567
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
2df494efc476f1235a7fd4bca3824c402be41058
11,450
py
Python
project/save_FlowFile_BPFormat.py
wesleybowman/karsten
ef4b2d6debae605902d76cd0484e71c0ba74fdd1
[ "MIT" ]
1
2015-05-04T17:48:56.000Z
2015-05-04T17:48:56.000Z
project/save_FlowFile_BPFormat.py
wesleybowman/karsten
ef4b2d6debae605902d76cd0484e71c0ba74fdd1
[ "MIT" ]
null
null
null
project/save_FlowFile_BPFormat.py
wesleybowman/karsten
ef4b2d6debae605902d76cd0484e71c0ba74fdd1
[ "MIT" ]
1
2021-11-15T17:53:19.000Z
2021-11-15T17:53:19.000Z
from __future__ import division import numpy as np from rawADCPclass import rawADCP from datetime import datetime from datetime import timedelta import scipy.io as sio import scipy.interpolate as sip import matplotlib.pyplot as plt import seaborn def date2py(matlab_datenum): python_datetime = datetime.fromordinal(int(matlab_datenum)) + \ timedelta(days=matlab_datenum%1) - timedelta(days = 366) return python_datetime def py2date(dt): mdn = dt + timedelta(days = 366) frac_seconds = (dt-datetime(dt.year,dt.month,dt.day,0,0,0)).seconds / (24.0 * 60.0 * 60.0) frac_microseconds = dt.microsecond / (24.0 * 60.0 * 60.0 * 1000000.0) return mdn.toordinal() + frac_seconds + frac_microseconds def calc_ensemble(x, ens, ens_dim): #initialize input ens = int(ens) #x = x[:, None] if ens_dim == 1: ens_size = np.floor(x.shape[0]/60) else: pass #x_ens = np.empty((ens_size, 1, ens)) x_ens = np.empty((ens_size, ens)) x_ens[:] = np.nan for j in xrange(ens): if ens_dim == 1: ind_ens = np.arange(j, x.shape[0] - (ens - j), ens) #x_ens[..., j] = x[ind_ens] x_ens[..., j] = x[ind_ens] else: pass #x_ens = np.nanmean(x_ens, axis=2) x_ens = np.nanmean(x_ens, axis=1) return x_ens def rotate_coords(x, y, theta): ''' Similar to "rotate_to_channelcoords.m" code, theta is now the angle between the old axis and the new x-axis (CCw is positive) ''' xnew = x * np.cos(theta) + y * np.sin(theta) ynew = -x * np.sin(theta) + y * np.cos(theta) return xnew, ynew def rotate_to_true(X, Y, theta=-19): ''' % X,Y are the X and Y coordinates (could be speeds) relative to magnetic % north -- inputs can be vectors % x,y are the coordinates relative to true north % This function assumes the measured location is Nova Scotia where the % declination angle is -19 degrees. % % Sept 29, 2012: Changed print statement % % Sept 20, 2012: Modified the function to allow for theta to be input. % Default will remain at -19 degrees, but this may not be accurate for all % places in Nova Scotia. ''' print 'Rotating velocities to be relative to true north (declination = {0})'.format(theta) Theta = theta * np.pi / 180 x = X * np.cos(Theta) + Y * np.sin(Theta) y = -X * np.sin(Theta) + Y * np.cos(Theta) return x, y def get_DirFromN(u,v): ''' #This function computes the direction from North with the output in degrees #and measured clockwise from north. 
# # Inputs: # u: eastward component # v: northward component ''' theta = np.arctan2(u,v) * 180 / np.pi ind = np.where(theta<0) theta[ind] = theta[ind] + 360 return theta def sign_speed(u_all, v_all, s_all, dir_all, flood_heading): if type(flood_heading)==int: flood_heading += np.array([-90, 90]) s_signed_all = np.empty(s_all.shape) s_signed_all.fill(np.nan) PA_all = np.zeros(s_all.shape[-1]) for i in xrange(s_all.shape[-1]): u = u_all[:, i] v = v_all[:, i] dir = dir_all[:, i] s = s_all[:, i] #determine principal axes - potentially a problem if axes are very kinked # since this would misclassify part of ebb and flood PA, _ = principal_axis(u, v) PA_all[i] = PA # sign speed - eliminating wrap-around dir_PA = dir - PA dir_PA[dir_PA < -90] += 360 dir_PA[dir_PA > 270] -= 360 #general direction of flood passed as input argument if flood_heading[0] <= PA <= flood_heading[1]: ind_fld = np.where((dir_PA >= -90) & (dir_PA<90)) s_signed = -s s_signed[ind_fld] = s[ind_fld] else: ind_ebb = np.where((dir_PA >= -90) & (dir_PA<90)) s_signed = s s_signed[ind_ebb] = -s[ind_ebb] s_signed_all[:, i] = s_signed return s_signed_all, PA_all def principal_axis(u, v): #create velocity matrix U = np.vstack((u,v)).T #eliminate NaN values U = U[~np.isnan(U[:, 0]), :] #convert matrix to deviation form rep = np.tile(np.mean(U, axis=0), [len(U), 1]) U -= rep #compute covariance matrix R = np.dot(U.T, U) / (len(U) - 1) #calculate eigenvalues and eigenvectors for covariance matrix lamb, V = np.linalg.eig(R) #sort eigenvalues in descending order so that major axis is given by first eigenvector # sort in descending order with indices ilamb = sorted(range(len(lamb)), key=lambda k: lamb[k], reverse=True) lamb = sorted(lamb, reverse=True) # reconstruct the eigenvalue matrix lamb = np.diag(lamb) #reorder the eigenvectors V = V[:, ilamb] #rotation angle of major axis in radians relative to cartesian coordinates ra = np.arctan2(V[0,1], V[1,1]) #express principal axis in compass coordinates # WES_COMMENT: may need to change this, cause in original is -ra PA = ra * 180 / np.pi + 90 #variance captured by principal varxp_PA = np.diag(lamb[0]) / np.trace(lamb) return PA, varxp_PA class Struct: def __init__(self, **entries): self.__dict__.update(entries) def save_FlowFile_BPFormat(fileinfo, adcp, rbr, params, options, debug=False): comments = ['data is in Polagye Tools format', 'data.east_vel and data.north_vel are relative to true north', 'The parameters were set by ' + fileinfo['paramfile']] day1 = date2py(adcp['mtime'][0][0]) print day1 #date_time = [date2py(tval[0]) for tval in adcp.mtime[:]] datenum = datetime(day1.year,1,1) + timedelta(365) datenum = datenum.toordinal() yd = adcp['mtime'][:].flatten() - datenum tind = np.where((yd > params['tmin']) & (yd < params['tmax']))[0] pres = {} time = {} time['mtime'] = adcp['mtime'][:].flatten()[tind] dt = np.nanmean(np.diff(time['mtime'])) if not rbr: print 'Depths measured by ADCP not yet coded.'
comments.append('Depths as measured by ADCP') else: print 'Ensemble averaging rbr data' comments.append('Depths as measured by RBR sensor') nens = round(dt/(rbr.mtime[1] - rbr.mtime[0])) temp = np.arange(rbr.mtime[nens/2-1], rbr.mtime[-1-nens/2], dt) #temp2 = np.r_[rbr.mtime[nens/2-1]: rbr.mtime[-1-nens/2]: dt] mtimeens = np.arange(rbr.mtime[nens/2-1], rbr.mtime[-1-nens/2], dt) mtimeens = mtimeens + params['rbr_hr_offset'] / 24 depthens = calc_ensemble(rbr.depth, nens, 1) temp = sip.interp1d(mtimeens, depthens, kind='linear') pres['surf']= temp(time['mtime']) + params['dabPS'] if debug: # Load in matlab values for testing filename = './140703-EcoEII_database/scripts_examples/mtime.mat' mat = sio.loadmat(filename, struct_as_record=False, squeeze_me=True) matTimes = mat['mtimeens'] filename = './140703-EcoEII_database/scripts_examples/dt.mat' mat = sio.loadmat(filename, struct_as_record=False, squeeze_me=True) matdt = mat['dt'] filename = './140703-EcoEII_database/scripts_examples/depthens.mat' mat = sio.loadmat(filename, struct_as_record=False, squeeze_me=True) matdepthens = mat['depthens'] filename = './140703-EcoEII_database/scripts_examples/time.mat' mat = sio.loadmat(filename, struct_as_record=False, squeeze_me=True) matmtime = mat['mtime'] print matTimes.shape print temp - matTimes print temp2 - matTimes print dt - matdt print depthens - matdepthens print 'time' print time['mtime'] - matmtime ## zlevels data = {} z = adcp['config']['ranges'][:] + params['dabADCP'] z = z.flatten() zind = np.where((z > params['zmin']) & (z < params['zmax']))[0] data['bins'] = z[zind] ## Currents data['vert_vel'] = adcp['vert_vel'][:][tind][:, zind] data['error_vel'] = adcp['error_vel'][:][tind][:, zind] # If compass wasn't calibrated if 'hdgmod' in params: adcp['east_vel'][:], adcp['north_vel'][:] = rotate_coords(adcp['east_vel'][:], adcp['north_vel'][:], params['hdgmod']) comments.append('East and north velocity rotated by params.hdgmod') # Rotate east_vel and north_vel to be relative to true north data['east_vel'], data['north_vel'] = \ rotate_to_true(adcp['east_vel'][:][tind][:, zind], adcp['north_vel'][:][tind][:, zind], params['declination']) # Direction data['dir_vel'] = get_DirFromN(data['east_vel'],data['north_vel']) # Signed Speed spd_all = np.sqrt(data['east_vel']**2+data['north_vel']**2) # Determine flood and ebb based on principal direction (Polagye Routine) print 'Getting signed speed (Principal Direction Method) -- used all speeds' s_signed_all, PA_all = sign_speed(data['east_vel'], data['north_vel'], spd_all, data['dir_vel'], params['flooddir']) data['mag_signed_vel'] = s_signed_all if options['showRBRavg'] or debug: print 'Plotting RBR vs average' plt.plot(rbr.mtime + params['rbr_hr_offset'] / 24, rbr.depth+params['dabPS'], label='RBR') plt.plot(time['mtime'], pres['surf'], 'r', label='AVG') plt.xlabel('Time') plt.ylabel('Elevation') plt.legend(bbox_to_anchor=(0, 0, 1, 1), bbox_transform=plt.gcf().transFigure) plt.show() if options['showPA'] or debug: print 'Plotting PA vs mean' plt.plot(PA_all, data['bins'], label='PA') plt.plot(np.array([PA_all[0], PA_all[-1]]), np.array([np.mean(pres['surf']), np.mean(pres['surf'])]), label='mean') plt.xlabel('Principal Axis Direction\n(clockwise from north)') plt.ylabel('z (m)') plt.legend(bbox_to_anchor=(0, 0, 1, 1), bbox_transform=plt.gcf().transFigure) plt.show() ## save lon = params['lon'] lat = params['lat'] outfile = fileinfo['outdir'] + fileinfo['flowfile'] print 'Saving data to {0}'.format(outfile) saveDict = {'data':data, 'pres':pres, 
'time':time, 'lon':lon, 'lat':lat, 'params':params, 'comments':comments} #save(outfile,'data','pres','time','lon','lat','params','Comments') ## Save metadata #metadata.progname=[mfilename('fullpath')]; #metadata.date = datestr(now); #metadata.paramfile = fileinfo.paramfile; #save(outfile,'metadata','-append') return saveDict if __name__ == '__main__': filename = '140703-EcoEII_database/data/GP-120726-BPd_raw.mat' data = rawADCP(filename) rawdata = rawADCP(filename) #adcp = Struct(**data.adcp) #rawADCP = data.adcp adcp = data.adcp #params = Struct(**data.saveparams) params = data.saveparams rbr = Struct(**data.rbr) # save_FlowFile_BPFormat(data.fileinfo, data.adcp, data.rbr, # data.saveparams, data.options) saveDict = \ save_FlowFile_BPFormat(data.fileinfo, adcp, rbr, params, data.options)
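A quick synthetic check of the signing logic above (not part of the original script; all numbers are illustrative): a rectilinear tide along a 30-degree compass heading should give a principal axis near 30 (or its reciprocal, 210, depending on eigenvector sign) and positive signed speeds on the flood phase.

import numpy as np

# alternating flood/ebb flow along a 30-degree compass heading
t = np.linspace(0, 4 * np.pi, 200)
speed = 1.5 * np.sin(t)
heading = np.deg2rad(30)
u = speed * np.sin(heading)   # eastward component
v = speed * np.cos(heading)   # northward component

PA, varxp = principal_axis(u, v)
print(PA)  # expected near 30 (or 210, the reciprocal direction)

# sign_speed expects 2-D arrays (time x bins); wrap the single series
u2, v2 = u[:, None], v[:, None]
s2 = np.hypot(u2, v2)
d2 = get_DirFromN(u2, v2)
s_signed, PA_all = sign_speed(u2, v2, s2, d2, flood_heading=30)
print(np.sign(s_signed[speed > 0, 0]).min())  # 1.0: flood samples are positive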
32.902299
94
0.599301
1,571
11,450
4.24634
0.253342
0.006596
0.008994
0.020986
0.20027
0.178084
0.10748
0.097137
0.090541
0.082147
0
0.024283
0.262707
11,450
347
95
32.997118
0.765932
0.14524
0
0.080808
0
0
0.144715
0.030865
0
0
0
0
0
0
null
null
0.010101
0.045455
null
null
0.075758
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
2df89db144b707f4b0822c6ae9ce1acc6ef701ad
482
py
Python
core/src/zeit/content/author/browser/interfaces.py
rickdg/vivi
16134ac954bf8425646d4ad47bdd1f372e089355
[ "BSD-3-Clause" ]
5
2019-05-16T09:51:29.000Z
2021-05-31T09:30:03.000Z
core/src/zeit/content/author/browser/interfaces.py
rickdg/vivi
16134ac954bf8425646d4ad47bdd1f372e089355
[ "BSD-3-Clause" ]
107
2019-05-24T12:19:02.000Z
2022-03-23T15:05:56.000Z
core/src/zeit/content/author/browser/interfaces.py
rickdg/vivi
16134ac954bf8425646d4ad47bdd1f372e089355
[ "BSD-3-Clause" ]
3
2020-08-14T11:01:17.000Z
2022-01-08T17:32:19.000Z
from zeit.cms.i18n import MessageFactory as _
import zope.formlib.interfaces
import zope.interface


@zope.interface.implementer(zope.formlib.interfaces.IWidgetInputError)
class DuplicateAuthorWarning(Exception):

    def doc(self):
        return _(
            u'An author with the given name already exists. '
            u'If you\'d like to create another author with the same '
            u'name anyway, check "Add duplicate author" '
            u'and save the form again.')
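For context, a hedged sketch of one common way a widget-input-error exception like this gets surfaced through a zope.formlib validate hook; AddForm, author_exists, and the confirmed_duplicate field are illustrative assumptions, not part of the original module.

# hypothetical form using the warning above
def validate(self, action, data):
    errors = super(AddForm, self).validate(action, data)
    if author_exists(data['firstname'], data['lastname']) \
            and not data.get('confirmed_duplicate'):
        # returning the warning as a form error re-renders the form
        errors = (errors or ()) + (DuplicateAuthorWarning(),)
    return errors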
32.133333
70
0.692946
62
482
5.354839
0.725806
0.060241
0.126506
0
0
0
0
0
0
0
0
0.005405
0.232365
482
14
71
34.428571
0.891892
0
0
0
0
0
0.246888
0
0
0
0
0
0
1
0.090909
false
0
0.272727
0.090909
0.545455
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
2df9e5faeeb7d475c0095425ade4eae6bf0cbee6
7,009
py
Python
rif_template.py
EngRaff92/RDL_REG_GEN
1da36a247552217d009b41b035ddda742ad2aa3e
[ "MIT" ]
2
2022-01-11T19:22:16.000Z
2022-01-11T20:19:55.000Z
rif_template.py
EngRaff92/RDL_REG_GEN
1da36a247552217d009b41b035ddda742ad2aa3e
[ "MIT" ]
null
null
null
rif_template.py
EngRaff92/RDL_REG_GEN
1da36a247552217d009b41b035ddda742ad2aa3e
[ "MIT" ]
null
null
null
header = """/* Icebreaker and IceSugar RSMB5 project - RV32I for Lattice iCE40 With complete open-source toolchain flow using: -> yosys -> icarus verilog -> icestorm project Tests are written in several languages -> Systemverilog Pure Testbench (Vivado) -> UVM testbench (Vivado) -> PyUvm (Icarus) -> Formal either using SVA and PSL (Vivado) or cuncurrent assertions with Yosys Copyright (c) 2021 Raffaele Signoriello (raff.signoriello92@gmail.com) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ /* This file contains register parameters and is autogenerated */ """ sv_inclusion = """ `ifndef COCOTB_SIM // Main Inclusion `else // Main Inclusion `endif """ module_name_param = """ // Main Module module gen_rif #( // Parameter Declaration parameter REG_WIDTH = 32, parameter ERROUT_IF_NOT_ACCESS = 1 )""" standard_rif_input_ports = """ ( // Port Declaration // General RIF input port input logic rif_clk, // Clock input logic rif_arst, // Asynchronous reset active high input logic rif_write, // If 0 -> Read if 1 -> Write input logic rif_cs, // States if the slave has been properly selected input logic [REG_WIDTH-1:0] rif_addr, // Address coming into the bus input logic [REG_WIDTH-1:0] rif_wdata, // Write Data coming into the bus""" hw_write_template_port = """ input logic [REG_WIDTH-1:0] $input_port_hw_rw_access_name,""" hw_read_template_port = """ output logic [REG_WIDTH-1:0] $output_port_hw_rw_access_name,""" standard_rif_output_ports = """ // General RIF output ports output logic [REG_WIDTH-1:0] rif_rdata, // Read Data coming out the bus output logic rif_error, // Give error in few specific conditions only output logic rif_ready // Is controlled by the slave and claims if the specifc slave is busy or not );""" set_of_decoder_flags = """ logic $dec_val;""" set_register = """ logic [REG_WIDTH-1:0] $reg_rw;""" internal_additional_signals = """ // Register Access Process logic error_handler, error_access; logic wr_rq, rd_rq; // Register decoder we are addressing 1Word at time so remove the first 2 bits logic [REG_WIDTH-1:0] reg_dec, reg_dec_dly;""" internal_decoder_signals_generation = """ assign reg_dec = rif_addr >> 2; always_ff@(posedge rif_clk or posedge rif_arst) begin if(rif_arst) reg_dec_dly <= 'h0; else reg_dec_dly <= reg_dec; end""" internal_wr_rd_request = """ // Assign the WR_REQUEST and RD_REQUEST assign wr_rq = rif_write & rif_cs; assign rd_rq = ~rif_write & rif_cs; // Register the request to be used for the READY signal logic [1:0] regsistered_request; always_ff @(posedge rif_clk or posedge rif_arst) begin : request_reg if(rif_arst) 
begin regsistered_request <= 2'b11; end else begin // Regardless of the read of write request we have to register the CS regsistered_request[0] <= (~rif_cs); regsistered_request[1] <= regsistered_request[0]; end end """ initialize_decoder_state = """ // Address decoding with full combo logic always_comb begin: addres_decoding // Initialize error_access = 1'b0;""" init_dec_access = """ $dec_val = 1'b0;""" case_switch_over_address = """ // Select using the address case (rif_addr)""" selection = """ $define_name: begin $dec_val = 1'b1; end""" defualt_end_case = """ default: begin if(ERROUT_IF_NOT_ACCESS) error_access = 1'b1; else error_access = 1'b0; end endcase // Endcase end // addres_decoding """ initialize_write_decoder_std = """ // Register write access always_ff @(posedge rif_clk or posedge rif_arst) begin : proc_reg_write_access if(rif_arst) begin rif_rdata <= 'h0;""" initialize_write_decoder_init_start = """ $reg_name <= $reset_val; """ initialize_write_decoder_init_end = """ end else begin: reg_write_decoder""" register_write_decoder_start = """ // Logic for HW = R and SW = RW if($dec_val) begin if(wr_rq) begin $reg_name <= rif_wdata & $sw_write_mask; end end""" register_write_decoder_end = """ end // proc_reg_write_access """ errorr_handler_logic_start = """ // check the error using COMBO logic to fire an error if RD happens on a RO register always_comb begin: read_process_error_handle""" errorr_handler_logic = """ // Logic for HW = W and SW = RO if($dec_val) begin if(wr_rq) begin error_handler = 1'b1; end else if(rd_rq) begin rif_rdata = $read_reg & $sw_read_mask; error_handler = 1'b0; end end""" errorr_handler_logic_end = """ end // read_process_error_handle """ errorr_handler_write_logic_start = """ // check the error using COMBO logic to fire an error if RD happens on a WO register always_comb begin: write_process_error_handle""" errorr_handler_write_logic = """ // Logic for HW = R and SW = WO if($dec_val) begin if(rd_rq) begin error_handler = 1'b1; rif_rdata = 'h0' end else begin error_handler = 1'b0; end end""" errorr_handler_write_logic_end = """ end // write_process_error_handle """ internal_latest_assignement = """ // assign the Error output assign rif_error = rif_cs ? (error_handler | error_access) : 'h0; // Assign the ready signal assign rif_ready = &(regsistered_request); """ assign_for_hw_read_policy_reg = """ assign $out_port = rif_cs ? ($reg_name & $hw_read_mask) : 'h0;""" assign_for_hw_write_policy_reg = """ assign $reg_name = $in_port & $hw_write_mask;""" end_module_rif = """ endmodule : gen_rif"""
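The $-prefixed fields in these fragments suggest they are filled with string.Template substitution (an assumption; the generator lives elsewhere in the repo). A minimal sketch of how one decode entry might be rendered and the module assembled, with illustrative field values:

# hypothetical assembly sketch; field values are illustrative
from string import Template

decode_entry = Template(selection).substitute(
    define_name="`CTRL_REG_ADDR",
    dec_val="ctrl_reg_dec",
)
rif_source = "".join([
    header,
    module_name_param,
    standard_rif_input_ports,
    standard_rif_output_ports,
    # ... per-register fragments such as decode_entry go here ...
    end_module_rif,
])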
31.151111
119
0.66329
966
7,009
4.569358
0.292961
0.019937
0.020616
0.022202
0.177843
0.159493
0.112143
0.084277
0.057997
0.057997
0
0.012165
0.249394
7,009
225
120
31.151111
0.826839
0
0
0.195652
0
0
0.844223
0.049786
0
0
0
0
0.005435
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
2dfac8a859dbed739d1b56982bcc545aec26ba63
9,229
py
Python
modmail/config.py
fossabot/modmail-1
ffbc3f981efb455f920e8c9f52657fbc2b802816
[ "MIT" ]
null
null
null
modmail/config.py
fossabot/modmail-1
ffbc3f981efb455f920e8c9f52657fbc2b802816
[ "MIT" ]
null
null
null
modmail/config.py
fossabot/modmail-1
ffbc3f981efb455f920e8c9f52657fbc2b802816
[ "MIT" ]
null
null
null
import asyncio
import datetime
import json
import logging
import os
import sys
import typing
from pathlib import Path
from typing import Any, Dict, Optional, Tuple

import discord
import toml
from discord.ext.commands import BadArgument
from pydantic import BaseModel
from pydantic import BaseSettings as PydanticBaseSettings
from pydantic import Field, SecretStr
from pydantic.env_settings import SettingsSourceCallable
from pydantic.types import conint

log = logging.getLogger(__name__)

CONFIG_PATHS: list = [
    f"{os.getcwd()}/config.toml",
    f"{os.getcwd()}/modmail/config.toml",
    "./config.toml",
]

DEFAULT_CONFIG_PATHS = [os.path.join(os.path.dirname(__file__), "config-default.toml")]


def determine_file_path(
    paths: typing.Union[list, tuple], config_type: str = "default"
) -> typing.Union[str, None]:
    path = None
    for file_path in paths:
        config_file = Path(file_path)
        if config_file.exists():
            path = config_file
            log.debug(f"Found {config_type} config at {file_path}")
            break
    return path or None


DEFAULT_CONFIG_PATH = determine_file_path(DEFAULT_CONFIG_PATHS)
USER_CONFIG_PATH = determine_file_path(CONFIG_PATHS, config_type="")


def toml_default_config_source(settings: PydanticBaseSettings) -> Dict[str, Any]:
    """
    A simple settings source that loads variables from a toml file from within
    the module's source folder.

    Here we happen to choose to use the `env_file_encoding` from Config when
    reading `config-default.toml`.
    """
    return dict(**toml.load(DEFAULT_CONFIG_PATH))


def toml_user_config_source(settings: PydanticBaseSettings) -> Dict[str, Any]:
    """
    A simple settings source that loads variables from the user's toml config
    file, if one was found.
    """
    if USER_CONFIG_PATH:
        return dict(**toml.load(USER_CONFIG_PATH))
    else:
        return dict()


class BaseSettings(PydanticBaseSettings):
    class Config:
        extra = "ignore"
        env_file = ".env"
        env_file_encoding = "utf-8"

        @classmethod
        def customise_sources(
            cls,
            init_settings: SettingsSourceCallable,
            env_settings: SettingsSourceCallable,
            file_secret_settings: SettingsSourceCallable,
        ) -> Tuple[SettingsSourceCallable, ...]:
            return (
                env_settings,
                init_settings,
                file_secret_settings,
                toml_user_config_source,
                toml_default_config_source,
            )


class ThreadBaseSettings(BaseSettings):
    class Config:
        env_prefix = "thread."

    # @classmethod
    # def alias_generator(cls, string: str) -> str:
    #     return f"thread.{super.__name__}.{string}"


class BotActivityConfig(BaseSettings):
    twitch_url: str = "https://www.twitch.tv/discordmodmail/"


class BotConfig(BaseSettings):
    prefix: str = "?"
    activity: BotActivityConfig
    token: str = None
    modmail_guild_id: str = None
    guild_id: str = None
    multi_bot: bool = False
    log_url: str = None
    log_url_prefix = "/"
    github_token: SecretStr = None
    database_type: str = "mongodb"  # TODO limit to specific strings
    enable_plugins: bool = True
    enable_eval: bool = True
    data_collection = True
    owners: str = 1
    connection_uri: str = None
    level_permissions: dict = None

    class Config:
        # env_prefix = "bot."
        allow_mutation = False


class ColorsConfig(BaseSettings):
    main_color: str = str(discord.Colour.blurple())
    error_color: str = str(discord.Colour.red())
    recipient_color: str = str(discord.Colour.green())
    mod_color: str = str(discord.Colour.blue())


class ChannelConfig(BaseSettings):
    # all of the below should be validated to channels,
    # either by name or by int
    main_category: str = None
    fallback_category: str = None
    log_channel: str = None
    mention_channel: str = None
    update_channel: str = None


class DevConfig(BaseSettings):
    """
    Developer specific configuration.

    These settings should not be changed unless you know what you're doing.
    """

    log_level: conint(ge=0, le=50) = getattr(logging, "NOTICE", 25)


class EmojiConfig(BaseSettings):
    """
    Standard emojis that the bot uses when a specific emoji is not defined
    for a specific use.
    """

    sent_emoji: str = "\\N{WHITE HEAVY CHECK MARK}"  # TODO type as a discord emoji
    blocked_emoji: str = "\\N{NO ENTRY SIGN}"  # TODO type as a discord emoji


class InternalConfig(BaseModel):
    # do NOT set these yourself. The bot will handle these
    activity_message: str = None
    activity_type: None = None
    status: None = None
    dm_disabled: int = 0

    # moderation
    blocked: dict = dict()
    blocked_roles: dict = dict()
    blocked_whitelist: list = list()
    command_permissions: dict = dict()
    level_permissions: dict = dict()
    override_command_level: dict = dict()

    # threads
    snippets: dict = dict()
    notifications: dict = dict()
    subscriptions: dict = dict()
    closures: dict = dict()

    # misc
    plugins: list = list()
    aliases: dict = dict()
    auto_triggers: dict = dict()

    class Config:
        arbitrary_types_allowed = True


class MentionConfig(BaseSettings):
    alert_on_mention: bool = False
    silent_alert_on_mention: bool = False
    mention_channel: int = None


class SnippetConfig(BaseSettings):
    anonmous_snippets: bool = False
    use_regex_autotrigger: bool = False


class ThreadAnonConfig(ThreadBaseSettings):
    username: str = "Response"
    footer: str = "Staff Team"


class ThreadAutoCloseConfig(ThreadBaseSettings):
    time: datetime.timedelta = 0
    silently: bool = False
    response: str = "This thread has been closed automatically due to inactivity after {timeout}."


class ThreadCloseConfig(ThreadBaseSettings):
    footer: str = "Replying will create a new thread"
    title: str = "Thread Closed"
    response: str = "{closer.mention} has closed this Modmail thread."
    on_leave: bool = False
    on_leave_reason: str = "The recipient has left the server."
    self_close_response: str = "You have closed this Modmail thread."


class ThreadConfirmCreationConfig(ThreadBaseSettings):
    enabled: bool = False
    title: str = "Confirm thread creation"
    response: str = "React to confirm thread creation which will directly contact the moderators"
    accept_emoji: str = "\N{WHITE HEAVY CHECK MARK}"  # TODO type as a discord emoji
    deny_emoji: str = "\N{NO ENTRY SIGN}"  # TODO type as a discord emoji


class ThreadCooldownConfig(ThreadBaseSettings):
    time: datetime.timedelta = 0
    embed_title: str = "Message not sent!"
    response: str = "You must wait for {delta} before you can contact me again."


class ThreadCreationConfig(ThreadBaseSettings):
    response: str = "The staff team will get back to you as soon as possible."
    footer: str = "Your message has been sent"
    title: str = "Thread Created"


class ThreadDisabledConfig(ThreadBaseSettings):
    new_title: str = "Not Delivered"
    new_response: str = "We are not accepting new threads."
    new_footer: str = "Please try again later..."
    current_title: str = "Not Delivered"
    current_response: str = "We are not accepting any messages."
    current_footer: str = "Please try again later..."


class ThreadMoveConfig(ThreadBaseSettings):
    title: str = "Thread Moved"
    notify: bool = False
    notify_mods: bool = False
    response: str = "This thread has been moved."


class ThreadSelfClosableConfig(ThreadBaseSettings):
    enabled: bool = False
    lock_emoji: str = "\N{LOCK}"
    creation_footer: str = "Click the lock to close the thread"


class ThreadConfig(BaseSettings):
    anon_reply_without_command: bool = False
    reply_without_command: bool = False
    plain_reply_without_command: bool = False
    mention: str = "@here"
    user_typing: bool = False
    mod_typing: bool = False
    transfer_reactions: bool = True
    contact_silently: bool = False
    account_age: datetime.timedelta = 0
    guild_age: datetime.timedelta = 0
    mod_tag: str = ""
    show_timestamp: bool = True

    anon: ThreadAnonConfig
    auto_close: ThreadAutoCloseConfig
    close: ThreadCloseConfig
    confirm_creation: ThreadConfirmCreationConfig
    cooldown: ThreadCooldownConfig
    creation: ThreadCreationConfig
    disabled: ThreadDisabledConfig
    move: ThreadMoveConfig
    self_closable: ThreadSelfClosableConfig


class UpdateConfig(BaseSettings):
    disable_autoupdates: bool = False
    update_notifications: bool = True

    class Config:
        allow_mutation = False
        env_prefix = "updates."


class ModmailConfig(BaseSettings):
    bot: BotConfig
    colors: ColorsConfig
    channels: ChannelConfig
    dev: DevConfig
    emoji: EmojiConfig
    mention: MentionConfig
    snippets: SnippetConfig
    thread: ThreadConfig
    updates: UpdateConfig

    shell: str = None


CONFIG = ModmailConfig()
INTERNAL = InternalConfig()
29.113565
98
0.694983
1,095
9,229
5.705936
0.305936
0.025928
0.007202
0.011524
0.205186
0.147567
0.129641
0.129641
0.117798
0.100512
0
0.001672
0.222234
9,229
316
99
29.205696
0.868766
0.112146
0
0.069124
0
0
0.131504
0.007168
0
0
0
0.006329
0
1
0.018433
false
0
0.078341
0.004608
0.806452
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
2dfeb124e98b0f347e20cdfa1b942484c6bbcdf7
1,571
py
Python
asap-tools/experiments/depricated/handler/comparative.py
project-asap/Profiler
eaad7eafe3635a2d0881c13069a3ac632784fd3a
[ "Apache-2.0" ]
3
2017-04-18T17:09:29.000Z
2017-11-08T22:55:32.000Z
asap-tools/experiments/depricated/handler/comparative.py
project-asap/Profiler
eaad7eafe3635a2d0881c13069a3ac632784fd3a
[ "Apache-2.0" ]
18
2016-11-07T10:44:58.000Z
2017-04-25T12:40:24.000Z
asap-tools/experiments/depricated/handler/comparative.py
project-asap/Profiler
eaad7eafe3635a2d0881c13069a3ac632784fd3a
[ "Apache-2.0" ]
4
2015-12-09T09:09:59.000Z
2018-05-23T14:29:00.000Z
__author__ = 'cmantas'
from tools import *

# Kmeans mahout vs spark
m_q = """select mahout_kmeans_text.documents/1000, mahout_kmeans_text.time/1000
from mahout_tfidf
inner join mahout_kmeans_text
    ON mahout_tfidf.documents=mahout_kmeans_text.documents
    AND mahout_tfidf.dimensions=mahout_kmeans_text.dimensions
where minDF=10 and k={};"""

# plot_from_query(m_q.format(20), label="Mahout, k=20", title="K-Means, Mahout vs Spark", xlabel="#docs/1000", ylabel="#terms")
# plot_from_query("select documents/1000, time/1000 from spark_kmeans_text WHERE k=20 and minDF=10", label="Spark, k=20")

## K-means
# k=10; minDF=10
# figure()
# draw_single_kmeans("weka", k, minDF, title="K-Means: WEKA, Mahout, Spark")
# draw_single_kmeans("mahout", k, minDF)
# draw_single_kmeans("spark", k, minDF, where_extra="spark_kmeans_text.documents<130000")
# show()
# exit()

# tfidf
figure()
plot_from_query("select documents/1000, avg(time/1000) from spark_tfidf "
                "where minDF=10 and documents<130000 group by documents",
                label="Spark TF/IDF", xlabel="#docs/1000", ylabel="time (sec)",
                title="TF/IDF Performance")
plot_from_query("select documents/1000, time/1000 from mahout_tfidf WHERE minDF=10",
                label="Mahout, minDF=10")
plot_from_query("select documents/1000, time/1000 from weka_tfidf WHERE minDF=10",
                label="Weka, minDF=10")

figure()
plot_from_query("select documents/1000, dimensions/1000 from weka_tfidf where minDF=10",
                title="doc freq", label="weka")
plot_from_query("select documents/1000, dimensions/1000 from mahout_tfidf where minDF=10",
                label="mahout")
show()
38.317073
128
0.760025
245
1,571
4.673469
0.220408
0.061135
0.079476
0.099563
0.379039
0.358952
0.358952
0.29869
0.262009
0
0
0.074965
0.099936
1,571
40
129
39.275
0.734795
0.330363
0
0.117647
0
0.058824
0.716908
0.160386
0
0
0
0
0
1
0
false
0
0.058824
0
0.058824
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
9302285f60696162f64b949eeb7c5d5dad50cb49
37,505
py
Python
workspace/module/maya-python-2.7/LxMaya/command/maShdr.py
no7hings/Lynxi
43c745198a714c2e5aca86c6d7a014adeeb9abf7
[ "MIT" ]
2
2018-03-06T03:33:55.000Z
2019-03-26T03:25:11.000Z
workspace/module/maya-python-2.7/LxMaya/command/maShdr.py
no7hings/lynxi
43c745198a714c2e5aca86c6d7a014adeeb9abf7
[ "MIT" ]
null
null
null
workspace/module/maya-python-2.7/LxMaya/command/maShdr.py
no7hings/lynxi
43c745198a714c2e5aca86c6d7a014adeeb9abf7
[ "MIT" ]
null
null
null
# coding=utf-8 # noinspection PyUnresolvedReferences import maya.cmds as cmds from LxBasic import bscMtdCore, bscObjects, bscMethods # from LxPreset import prsConfigure, prsOutputs # from LxCore.config import appCfg # from LxCore.preset.prod import assetPr # from LxDatabase import dtbMtdCore # from LxDatabase.data import datHash # from LxMaya.command import maUtils, maAttr, maUuid, maTxtr # none = '' # DEF_mya_default_shading_engine_list = [ 'initialShadingGroup', 'initialParticleSE', 'defaultLightSet', 'defaultObjectSet' ] # def materialNodeTypeConfig(): dic = bscMtdCore.orderedDict() # majorTypes = [ 'texture', 'shader', 'utility' ] for majorType in majorTypes: nodeTypes = cmds.listNodeTypes(majorType) for nodeType in nodeTypes: dic[nodeType] = majorType return dic # def _getNodeShadingEngineNodeStringList(nodepathString): lis = [] # shapePath = maUtils._dcc_getNodShapeNodepathStr(nodepathString, 1) if not shapePath: shapePath = nodepathString # outputNodes = maUtils._getNodeTargetNodeStringList(shapePath, appCfg.DEF_mya_type_shading_engine) if outputNodes: [lis.append(i) for i in outputNodes if i not in DEF_mya_default_shading_engine_list] return lis # def getObjectsShadingEngineLis(objectLis): lis = [] if objectLis: for nodepathString in objectLis: shadingEngineLis = _getNodeShadingEngineNodeStringList(nodepathString) if shadingEngineLis: [lis.append(i) for i in shadingEngineLis if i not in lis] return lis # def getObjectMaterials(nodepathString): # List [ <Material Info Nde_Node> ] materials = [] shadingEngineLis = _getNodeShadingEngineNodeStringList(nodepathString) if shadingEngineLis: for shadingEngine in shadingEngineLis: inputNodes = maUtils._getNodeTargetNodeStringList(shadingEngine, 'materialInfo') if inputNodes: for inputNode in inputNodes: if not inputNode in materials: materials.append(inputNode) return materials # def getObjectsMaterials(objectLis): # List [ <Shading Engine Nde_Node> ] materials = [] if objectLis: for nodepathString in objectLis: subMaterials = getObjectMaterials(nodepathString) for material in subMaterials: if not material in materials: materials.append(material) return materials # Get Nde_ShaderRef Nodes def getConnectionNodes(material): # Sub Method def getBranch(node): inputNodes = maUtils._getNodeSourceNodeStringList(node) if inputNodes: for node in inputNodes: if node: if not node in nodes: nodes.append(node) getBranch(node) # List [ < File Nde_Node > ] nodes = [material] # Loop getBranch(material) # return nodes # def getMaterialNodes(material): exceptObjectTypes = ['mesh', 'nurbsSurface', 'nurbsCurve', 'pgYetiMaya', 'nurbsHair'] exceptNodeTypes = ['groupId', 'colorManagementGlobals'] # materialNodes = [] connectionNodes = getConnectionNodes(material) for node in connectionNodes: objectType = maUtils._getNodeShapeCategoryString(node) nodeType = maUtils._getNodeCategoryString(node) if not objectType in exceptObjectTypes and not nodeType in exceptNodeTypes: materialNodes.append(node) return materialNodes # def getTextureNodeLisByObject(objectLis): textureNodes = [] shadingEngineLis = getObjectsShadingEngineLis(objectLis) if shadingEngineLis: for shadingEngine in shadingEngineLis: nodes = getConnectionNodes(shadingEngine) if nodes: for node in nodes: nodeType = maUtils._getNodeCategoryString(node) if nodeType in appCfg.MaTexture_NodeTypeLis: if not node in textureNodes: textureNodes.append(node) return textureNodes # def getObjectsMaterialNodesRenameDic(objectLis, assetName, assetVariant, assetStage): dic = bscMtdCore.orderedDict() if objectLis: explain 
= u'''Get Object's Material Rename Data''' maxValue = len(objectLis) progressBar = bscObjects.ProgressWindow(explain, maxValue) for objSeq, nodepathString in enumerate(objectLis): progressBar.update() objectType = maUtils._getNodeShapeCategoryString(nodepathString) materials = getObjectMaterials(nodepathString) index = 0 if materials: for matlSeq, material in enumerate(materials): nodes = getMaterialNodes(material) if nodes: for nodSeq, node in enumerate(nodes): seq = str(index) hierarchyName = maUtils.getAttrDatum(nodepathString, prsOutputs.Util.basicHierarchyAttrLabel) if hierarchyName is None: hierarchyName = assetStage + '_' + objectType + '_' + str(objSeq) nodeType = maUtils._getNodeCategoryString(node) # nodeName = '{0}_{1}_{2}_{3}_{4}_{5}'.format( prsOutputs.Util.Lynxi_Prefix_Product_Asset, assetName, assetVariant, hierarchyName, nodeType, seq ) dic[node] = nodeName # index += 1 return dic # def setObjectsMaterialNodesRename(objectLis, assetName, assetVariant, assetStage): exceptObjectTypes = ['mesh', 'pgYetiMaya', 'nurbsHair', 'aiAOV'] exceptNodeLis = ['time1', 'lambert1', 'defaultColorMgtGlobals', 'layerManager', 'renderLayerManager', assetPr.astUnitModelBridgeGroupName(assetName)] # renameDataArray = [] renameDic = getObjectsMaterialNodesRenameDic(objectLis, assetName, assetVariant, assetStage) if renameDic: for node, nodeName in renameDic.items(): objectType = maUtils._getNodeShapeCategoryString(node) if not objectType in exceptObjectTypes: if not node in exceptNodeLis: if not node == nodeName: renameDataArray.append((node, nodeName)) # if renameDataArray: # View Progress explain = u'''Rename Material - Nde_Node''' maxValue = len(renameDataArray) progressBar = bscObjects.ProgressWindow(explain, maxValue) for node, nodeName in renameDataArray: progressBar.update(nodeName) print node, nodeName maUtils.setNodeRename(node, nodeName) # def getAovNodesRenameDic(aovNodes, assetName, assetVariant): dic = bscMtdCore.orderedDict() if aovNodes: explain = u'''Get AOV's Rename Data''' maxValue = len(aovNodes) progressBar = bscObjects.ProgressWindow(explain, maxValue) for aovSeq, aov in enumerate(aovNodes): progressBar.update() nodes = getMaterialNodes(aov) if nodes: for nodSeq, node in enumerate(nodes): seq = '{0}{1}'.format( str(aovSeq + 1).zfill(3), str(nodSeq + 1).zfill(3) ) nodeType = maUtils._getNodeCategoryString(node) nodeName = '{0}_{1}_{2}_{3}'.format( assetName, assetVariant, nodeType, seq ) dic[node] = nodeName return dic # def setRenameAovNodes(aovNodes, assetName, assetVariant): exceptObjectTypes = ['mesh', 'pgYetiMaya', 'nurbsHair', 'aiAOV', 'aiAOVDriver', 'aiAOVFilter'] exceptNodeLis = [ 'time1', 'lambert1', 'defaultColorMgtGlobals', 'layerManager', 'defaultArnoldDriver', 'defaultArnoldFilter' ] # renameDataArray = [] renameDic = getAovNodesRenameDic(aovNodes, assetName, assetVariant) if renameDic: for node, nodeName in renameDic.items(): objectType = maUtils._getNodeShapeCategoryString(node) if not objectType in exceptObjectTypes: if not node in exceptNodeLis: if not node == nodeName: renameDataArray.append((node, nodeName)) # if renameDataArray: # View Progress explain = u'''Rename AOV Nde_Node''' maxValue = len(renameDataArray) progressBar = bscObjects.ProgressWindow(explain, maxValue) for node, nodeName in renameDataArray: progressBar.update(nodeName) maUtils.setNodeRename(node, nodeName) # Nde_Node Data def getMaterialNodeData(material): nodesDataArray = [] nodes = getMaterialNodes(material) if nodes: for node in nodes: # Filter Unused Nde_Node Type nodeType = 
maUtils._getNodeCategoryString(node) definedAttrData = maAttr.getNodeDefAttrDatumLis(node) customAttrData = maAttr.getNodeUserDefAttrData(node) nodesDataArray.append((node, nodeType, definedAttrData, customAttrData)) return nodesDataArray # def getMaterialComponentData(material): composeDataArray = [] nodes = getMaterialNodes(material) if nodes: for node in nodes: # Filter Unused Nde_Node Type nodeType = maUtils._getNodeCategoryString(node) composeDataArray.append(nodeType) return composeDataArray # def getMaterialAttributeData(material): attributeDataArray = [] nodes = getMaterialNodes(material) if nodes: for node in nodes: # Filter Unused Nde_Node Type nodeType = maUtils._getNodeCategoryString(node) definedAttrData = maAttr.getNodeDefAttrDatumLis(node) customAttrData = maAttr.getNodeUserDefAttrData(node) attributeDataArray.append( (nodeType, getNodeAttrDataReduce(definedAttrData), getNodeAttrDataReduce(customAttrData)) ) return attributeDataArray # def getNodeAttrDataReduce(attrDatas): attrDataArray = [] MaTexture_AttrNameLis = maTxtr.MaTexture_AttrNameLis if attrDatas: for data in attrDatas: attrName, data, attrType, lock = data if attrName in MaTexture_AttrNameLis: isTexture = maTxtr.isOsTextureExist(data) if isTexture: data = bscMethods.OsFile.basename(data) if not isTexture: data = none attrDataArray.append((attrName, data)) return attrDataArray # Nde_Node Data def getMaterialsNodeData(materials): dic = bscMtdCore.orderedDict() if materials: for material in materials: uniqueId = maUuid._getNodeUniqueIdString(material) shaderNodeData = getMaterialNodeData(material) if shaderNodeData: dic[uniqueId] = shaderNodeData return dic # def getMaterialRelationData(material): MaAttrNameLis_ShaderExcept = [ '.groupNodes', '.dagSetMembers' ] # connectionArray = [] nodes = getConnectionNodes(material) if nodes: for node in nodes: subConnectionArray = maAttr.getNodeConnectionsDataArray(node) for sourceAttr, targetAttr in subConnectionArray: isCollection = True for exceptAttrName in MaAttrNameLis_ShaderExcept: if exceptAttrName in targetAttr: isCollection = False if isCollection: connectionArray.append((sourceAttr, targetAttr)) return connectionArray # Nde_Node Data def getMaterialsRelationData(materials): dic = bscMtdCore.orderedDict() if materials: for material in materials: uniqueId = maUuid._getNodeUniqueIdString(material) nodeConnectionData = getMaterialRelationData(material) if nodeConnectionData: dic[uniqueId] = nodeConnectionData return dic # def getMaterialComponentInfo(material): materialComponentData = getMaterialComponentData(material) return datHash.getStrHashKey(materialComponentData) # def getMaterialAttributeInfo(material): materialAttributeData = getMaterialAttributeData(material) return datHash.getStrHashKey(materialAttributeData) # def getMaterialRelationInfo(material): connections = getMaterialRelationData(material) relationData = getNodeConnectionDataReduce(connections) return datHash.getStrHashKey(relationData) # def getNodeConnectionDataReduce(connections): connectionArray = [] if connections: for sourceAttr, targetAttr in connections: if not sourceAttr.endswith('.message'): connectionArray.append((sourceAttr, targetAttr)) return connectionArray # def getMaterialsInformationData(materials): dic = bscMtdCore.orderedDict() if materials: for material in materials: uniqueId = maUuid._getNodeUniqueIdString(material) dic[uniqueId] = \ getMaterialComponentInfo(material), \ getMaterialAttributeInfo(material), \ getMaterialRelationInfo(material) return dic # def 
setCreateCompMaterialsNodes(materialsNodeData): if materialsNodeData: for uniqueId, nodeDataArray in materialsNodeData.items(): if nodeDataArray: keyNodeData = nodeDataArray[0] setCreateMaterialNode(keyNodeData) material = keyNodeData[0] maUuid.setMayaUniqueId(material, uniqueId) for subNodeData in nodeDataArray[0:]: setCreateMaterialNode(subNodeData) # def setCreateCompMaterialsUniqueId(materialsNodeData): if materialsNodeData: for uniqueId, nodeDataArray in materialsNodeData.items(): if nodeDataArray: keyNodeData = nodeDataArray[0] setCreateMaterialNode(keyNodeData) material = keyNodeData[0] maUuid.setMayaUniqueId(material, uniqueId) # def setCreateCompAovsNodes(materialsNodeData): if materialsNodeData: for uniqueId, nodeDataArray in materialsNodeData.items(): if nodeDataArray: keyNodeData = nodeDataArray[0] setCreateMaterialNode(keyNodeData) aovNode = keyNodeData[0] maUuid.setMayaUniqueId(aovNode, uniqueId) setCreateAovNodeLink(aovNode) for subNodeData in nodeDataArray[0:]: setCreateMaterialNode(subNodeData) # def setCreateMaterialNode(materialNodeData): node, nodeType, definedAttrData, customAttrData = materialNodeData # setCreateNode(node, nodeType, definedAttrData) # Set User Attribute maAttr.setObjectUserDefinedAttrs(node, customAttrData, lockAttribute=False) # def setCreateNode(node, nodeType, definedAttrData): shaderNodeTypeDic = materialNodeTypeConfig() # Filter Exists if not cmds.objExists(node): isShader = nodeType in shaderNodeTypeDic.keys() # Filter is Nde_ShaderRef Nde_Node if not isShader: cmds.createNode(nodeType, name=node) # if isShader: majorType = shaderNodeTypeDic[nodeType] if majorType == 'texture': cmds.shadingNode(nodeType, name=node, asTexture=1) elif majorType == 'shader': cmds.shadingNode(nodeType, name=node, asShader=1) elif majorType == 'utility': cmds.shadingNode(nodeType, name=node, asUtility=1) # Set Nde_Node Attribute maAttr.setNodeDefAttrByData(node, definedAttrData, lockAttribute=False) # def setCreateMaterialsConnections(connectionData): if connectionData: for uniqueId, connectionArray in connectionData.items(): maAttr.setCreateConnections(connectionArray) # def getMaterialEvaluateData(objectLis): exceptObjectTypes = [ 'mesh', 'pgYetiMaya', 'nurbsHair' ] # exceptNodeLis = [ 'time1', 'lambert1', 'defaultColorMgtGlobals' ] # dic = bscMtdCore.orderedDict() totalMaterials = [] totalNodes = [] totalConnections = [] if objectLis: for nodepathString in objectLis: shadingEngineLis = _getNodeShadingEngineNodeStringList(nodepathString) if shadingEngineLis: for shadingEngine in shadingEngineLis: if not shadingEngine in totalMaterials: totalMaterials.append(shadingEngine) # Nde_Node nodes = getMaterialNodes(shadingEngine) if nodes: for node in nodes: objectType = maUtils._getNodeShapeCategoryString(node) if not objectType in exceptObjectTypes: if not node in exceptNodeLis: if not node in totalNodes: totalNodes.append(node) # Connection connectionArray = getMaterialRelationData(shadingEngine) if connectionArray: for connection in connectionArray: if not connection in totalConnections: totalConnections.append(connection) dic['material'] = len(totalMaterials) dic['node'] = len(totalNodes) dic['connection'] = len(totalConnections) return dic # def getObjectsMaterialRelinkData(objectLis): shaderObjectTypes = ['mesh', 'pgYetiMaya', 'nurbsHair'] dic = bscMtdCore.orderedDict() for nodepathString in objectLis: linkDatumLis = [] shape = maUtils._dcc_getNodShapeNodepathStr(nodepathString, fullPath=1) shadingEngineLis = maUtils._getNodeTargetNodeStringList(shape, 
appCfg.DEF_mya_type_shading_engine) if shadingEngineLis: for shadingEngine in shadingEngineLis: elementSetData = cmds.sets(shadingEngine, query=1) if elementSetData: elementSetFullPathData = [i for i in cmds.ls(elementSetData, leaf=1, noIntermediate=1, long=1)] for data in elementSetFullPathData: # Object Group if data.startswith(nodepathString): showType = cmds.ls(data, showType=1)[1] if showType in shaderObjectTypes: linkData = none, shadingEngine if not linkData in linkDatumLis: linkDatumLis.append(linkData) # Component Object Group if showType == 'float3': componentObjectIndex = data.split('.')[-1] linkData = '.' + componentObjectIndex, shadingEngine if not linkData in linkDatumLis: linkDatumLis.append(linkData) dic[nodepathString] = linkDatumLis return dic # def getMaterialShadingEngine(uniqueId): material = maUuid.getObject(uniqueId) if material: shadingEngineLis = maUtils._getNodeSourceNodeStringList(material, appCfg.DEF_mya_type_shading_engine) if shadingEngineLis: return shadingEngineLis[0] # def getShadingEngineMaterialUniqueId(shadingEngine): materials = maUtils._getNodeTargetNodeStringList(shadingEngine, 'materialInfo') if materials: material = materials[0] return maUuid._getNodeUniqueIdString(material) # def getShaderObjectsObjSetDic(objectLis): dic = bscMtdCore.orderedDict() for nodepathString in objectLis: compIndex = maUuid._getNodeUniqueIdString(nodepathString) linkDatumLis = getShaderObjectObjSetSub(nodepathString) dic[compIndex] = linkDatumLis return dic # def getShaderObjectObjSetSub(nodepathString): shaderObjectTypes = ['mesh', 'pgYetiMaya', 'nurbsHair'] # lis = [] # shadingEngineLis = _getNodeShadingEngineNodeStringList(nodepathString) if shadingEngineLis: for shadingEngine in shadingEngineLis: compMaterialIndex = getShadingEngineMaterialUniqueId(shadingEngine) elementSetData = cmds.sets(shadingEngine, query=1) if elementSetData: elementSetFullPathData = [i for i in cmds.ls(elementSetData, leaf=1, noIntermediate=1, long=1)] for data in elementSetFullPathData: # Object Group if data.startswith(nodepathString): showType = cmds.ls(data, showType=1)[1] if showType in shaderObjectTypes: linkData = none, compMaterialIndex if not linkData in lis: lis.append(linkData) # Component Object Group if showType == 'float3': componentObjectIndex = data.split('.')[-1] linkData = '.' 
+ componentObjectIndex, compMaterialIndex if not linkData in lis: lis.append(linkData) return lis # Link Material def setLinkObjectsMaterial(data, objectNamespace=none, materialNamespace=none): if data: # View Progress explain = u'''Link / Relink Material''' maxValue = len(data) progressBar = bscObjects.ProgressWindow(explain, maxValue) for nodepathString, linkDatumLis in data.items(): # In Progress progressBar.update() # usedObject = \ [nodepathString, maUtils.getObjectPathJoinNamespace(nodepathString, objectNamespace)][objectNamespace != none] # if linkDatumLis: # Clear >>> 01 setObjectCleanTransformShadingEngine(usedObject) setObjectCleanShapeShadingEngine(usedObject) # Link >>> 02 isComponentLink = len(linkDatumLis) > 1 # if not isComponentLink: componentObjectIndex, shadingEngine = linkDatumLis[0] usedShadingEngine = [shadingEngine, maUtils.getNodeJoinNamespace(shadingEngine, materialNamespace)][materialNamespace != none] setObjectAssignMaterial(usedObject, none, usedShadingEngine) # if isComponentLink: for componentObjectIndex, shadingEngine in linkDatumLis: usedShadingEngine = [shadingEngine, maUtils.getNodeJoinNamespace(shadingEngine, materialNamespace)][materialNamespace != none] setObjectAssignMaterial(usedObject, componentObjectIndex, usedShadingEngine) # Link Material def setMaterialsObjectSetsConnect(datumDic): if datumDic: # View Progress explain = u'''Connect Material's Object Set(s)''' maxValue = len(datumDic) progressBar = bscObjects.ProgressWindow(explain, maxValue) for compIndex, linkDatumLis in datumDic.items(): progressBar.update() # setMaterialObjectSetConnect(compIndex, linkDatumLis) # def setMaterialObjectSetConnect(compIndex, linkDatumLis): nodepathString = maUuid.getObject(compIndex, fullPath=1) if nodepathString: # Clear >>> 01 if linkDatumLis: setObjectCleanTransformShadingEngine(nodepathString) setObjectCleanShapeShadingEngine(nodepathString) # Link >>> 02 isComponentLink = len(linkDatumLis) > 1 # if not isComponentLink: componentObjectIndex, compMaterialIndex = linkDatumLis[0] shadingEngine = getMaterialShadingEngine(compMaterialIndex) setObjectAssignMaterial(nodepathString, None, shadingEngine) # else: for componentObjectIndex, compMaterialIndex in linkDatumLis: shadingEngine = getMaterialShadingEngine(compMaterialIndex) setObjectAssignMaterial(nodepathString, componentObjectIndex, shadingEngine) # def setObjectCleanShadingEngine(nodepathString): setObjectCleanTransformShadingEngine(nodepathString) setObjectCleanShapeShadingEngine(nodepathString) # def setObjectCleanShapeShadingEngine(nodepathString): shape = maUtils._dcc_getNodShapeNodepathStr(nodepathString) shapeShadingEngines = maUtils._getNodeTargetNodeStringList(shape, appCfg.DEF_mya_type_shading_engine) if shapeShadingEngines: [cmds.sets(shape, remove=i) for i in shapeShadingEngines] # def setObjectCleanTransformShadingEngine(nodepathString): outputConnections = maUtils.getNodeOutputConnectionLis(nodepathString) if outputConnections: for sourceAttr, targetAttr in outputConnections: if sourceAttr.endswith('instObjGroups'): maUtils.setAttrDisconnect(sourceAttr, targetAttr) # def setObjectDefaultShadingEngine(nodepathString): shape = maUtils._dcc_getNodShapeNodepathStr(nodepathString) shadingEngineLis = maUtils._getNodeTargetNodeStringList(shape, appCfg.DEF_mya_type_shading_engine) if not shadingEngineLis: cmds.sets(shape, forceElement='initialShadingGroup') # def setObjectsDefaultShadingEngine(componentObjectIndexes): for componentObjectIndex in componentObjectIndexes: nodepathString = 
maUuid.getObject(componentObjectIndex) setObjectDefaultShadingEngine(nodepathString) # def setObjectAssignMaterial(nodepathString, componentObjectIndex, shadingEngine): if componentObjectIndex is None: linkObject = maUtils._dcc_getNodShapeNodepathStr(nodepathString, 1) else: linkObject = nodepathString + componentObjectIndex # if maUtils._isAppExist(linkObject): if maUtils._isAppExist(shadingEngine): cmds.sets(linkObject, forceElement=shadingEngine) setCreateLightLink(shadingEngine) else: cmds.sets(linkObject, forceElement='initialShadingGroup') # def setCreateLightLink(shadingEngine): def getUsedConnectionIndex(): for i in range(5000): if isUsedPartitionConnectionIndex(i) \ and isUsedObjectLinkConnectionIndex(i) \ and isUsedShadowObjectLinkConnectionIndex(i) \ and isUsedLightLinkConnectionIndex(i) \ and isUsedShadowLightLinkConnectionIndex(i): return i # def isUsedConnection(connection): boolean = False if cmds.objExists(connection): if not cmds.connectionInfo(connection, isDestination=1): boolean = True return boolean # def isUsedPartitionConnectionIndex(index): connection = appCfg.MaRenderPartition + '.sets[%s]' % index return isUsedConnection(connection) # def isUsedObjectLinkConnectionIndex(index): connection = appCfg.MaNodeName_LightLink + '.link[%s].object' % index return isUsedConnection(connection) # def isUsedShadowObjectLinkConnectionIndex(index): connection = appCfg.MaNodeName_LightLink + '.shadowLink[%s].shadowObject' % index return isUsedConnection(connection) # def isUsedLightLinkConnectionIndex(index): connection = appCfg.MaNodeName_LightLink + '.link[%s].light' % index return isUsedConnection(connection) # def isUsedShadowLightLinkConnectionIndex(index): connection = appCfg.MaNodeName_LightLink + '.shadowLink[%s].shadowLight' % index return isUsedConnection(connection) # def setMain(): index = getUsedConnectionIndex() if index: # Debug ( Repeat ) if not cmds.connectionInfo(shadingEngine + '.partition', isSource=1): cmds.connectAttr(shadingEngine + '.partition', appCfg.MaRenderPartition + '.sets[%s]' % index) cmds.connectAttr(shadingEngine + '.message', appCfg.MaNodeName_LightLink + '.link[%s].object' % index) cmds.connectAttr(shadingEngine + '.message', appCfg.MaNodeName_LightLink + '.shadowLink[%s].shadowObject' % index) cmds.connectAttr(appCfg.MaNodeName_DefaultLightSet + '.message', appCfg.MaNodeName_LightLink + '.link[%s].light' % index) cmds.connectAttr(appCfg.MaNodeName_DefaultLightSet + '.message', appCfg.MaNodeName_LightLink + '.shadowLink[%s].shadowLight' % index) # setMain() # def getAovNodeLis(renderer): aovNodes = [] if renderer == prsConfigure.Utility.DEF_value_renderer_arnold: aovNodes = getArnoldAovNodeLis() elif renderer == prsConfigure.Utility.DEF_value_renderer_redshift: aovNodes = getRedshiftAovNodes() return aovNodes # Get Arnold's Aov def getArnoldAovNodeLis(): lis = [] if maUtils.isArnoldEnable(): lis = maUtils._getNodeSourceNodeStringList('defaultArnoldRenderOptions', 'aiAOV') return lis # Get Redshift's Aov def getRedshiftAovNodes(): lis = [] if maUtils.isRedshiftEnable(): lis = maUtils.getNodeLisByType('RedshiftAOV') return lis # def getAovNodesData(renderer): aovNodesData = bscMtdCore.orderedDict() if renderer == 'Arnold': aovNodesData = getArnoldAovNodesData() if renderer == 'Redshift': aovNodesData = getRedshiftAovNodesData() return aovNodesData # def getArnoldAovNodesData(): dic = bscMtdCore.orderedDict() aovNodes = getArnoldAovNodeLis() if aovNodes: for aovNode in aovNodes: aovName = maUtils.getAttrDatum(aovNode, 'name') dic[aovNode] = aovName 
return dic # def getRedshiftAovNodesData(): dic = bscMtdCore.orderedDict() aovNodes = getRedshiftAovNodes() if aovNodes: for aovNode in aovNodes: aovName = maUtils.getAttrDatum(aovNode, 'name') dic[aovNode] = aovName return dic # noinspection PyUnresolvedReferences def getArnoldOption(): if prsMethods.Project.isMayaUsedArnoldRenderer(): try: import mtoa.core as core # core.createOptions() except:pass # def setCreateAovNodeLink(aovNode, maxDepth=50): def getAovListAttr(): aovListAttrs = ['%s.aovList[%s]' % ('defaultArnoldRenderOptions', i) for i in range(0, maxDepth)] for aovListAttr in aovListAttrs: if maUtils._isAppExist(aovListAttr): if not maAttr.isAttrDestination(aovListAttr): return aovListAttr # def setMain(): aovMessageAttr = aovNode + '.message' if maUtils._isAppExist(aovMessageAttr): if not maAttr.isAttrSource(aovMessageAttr): aovListAttr = getAovListAttr() if aovListAttr: cmds.connectAttr(aovMessageAttr, aovListAttr) # setMain() @dtbMtdCore.fncThreadSemaphoreModifier def setRepairAovNodesLink(): getArnoldOption() aovs = maUtils.getNodeLisByType('aiAOV') if aovs: [setCreateAovNodeLink(i) for i in aovs] # def setRepairArnoldAov(aovNodes=None): if not aovNodes: aovNodes = cmds.ls(type='aiAOV') # defDriverAttr = 'defaultArnoldDriver.message' defFilterAttr = 'defaultArnoldFilter.message' if aovNodes: for aovNode in aovNodes: outputDriverAttr = aovNode + '.outputs[0].driver' outputFilterAttr = aovNode + '.outputs[0].filter' inputConnections = maUtils.getNodeInputConnectionLis(aovNode) for inputAttr, outputAttr in inputConnections: if outputAttr == outputDriverAttr: if inputAttr != defDriverAttr: maUtils.setAttrDisconnect(inputAttr, outputAttr) maUtils.setAttrConnect(defDriverAttr, outputDriverAttr) # if outputAttr == outputFilterAttr: if inputAttr != defFilterAttr: maUtils.setAttrDisconnect(inputAttr, outputAttr) maUtils.setAttrConnect(defFilterAttr, outputFilterAttr) else: if not cmds.isConnected(defDriverAttr, outputDriverAttr): cmds.connectAttr(defDriverAttr, outputDriverAttr) # if not cmds.isConnected(defFilterAttr, outputFilterAttr): cmds.connectAttr(defFilterAttr, outputFilterAttr) # def getObjectsAttrData(objectLis): dic = bscMtdCore.orderedDict() # if objectLis: for nodepathString in objectLis: objectShape = maUtils._dcc_getNodShapeNodepathStr(nodepathString) uniqueId = maUuid._getNodeUniqueIdString(nodepathString) renderAttrData = maAttr.getNodeRenderAttrData(objectShape) plugAttrData = maAttr.getNodePlugAttrData(objectShape) customAttrData = maAttr.getNodeUserDefAttrData(objectShape) dic[uniqueId] = renderAttrData, plugAttrData, customAttrData return dic # def setObjectsAttrsCreate(datumDic): if datumDic: # View Progress explain = u'''Set Material's Object Attribute(s)''' maxValue = len(datumDic) progressBar = bscObjects.ProgressWindow(explain, maxValue) for uniqueId, attrData in datumDic.items(): progressBar.update() # nodepathString = maUuid.getObject(uniqueId) if nodepathString: setObjectAttrsCreate(nodepathString, attrData) # def setObjectAttrsCreate(nodepathString, attrData): if attrData: objectShape = maUtils._dcc_getNodShapeNodepathStr(nodepathString, 1) renderAttrData, plugAttrData, customAttrData = attrData if renderAttrData: maAttr.setNodeDefAttrByData(objectShape, renderAttrData) if plugAttrData: maAttr.setNodeDefAttrByData(objectShape, plugAttrData) if customAttrData: maAttr.setObjectUserDefinedAttrs(objectShape, customAttrData) # def setRefreshTextureColorSpace(textureNodes): if textureNodes: for i in textureNodes: colorSpace = maUtils.getAttrDatum(i, 
'colorSpace') if not colorSpace == 'sRGB': maUtils.setAttrDatumForce_(i, 'ignoreColorSpaceFileRules', 1) # def setArnoldShaderCovert(nodepathString, texturePath): nodeTypeLis = [ 'aiStandardSurface' ] shadingEngineLis = _getNodeShadingEngineNodeStringList(nodepathString) if shadingEngineLis: for shadingEngine in shadingEngineLis: targetAttr0 = maUtils._toNodeAttr([shadingEngine, 'surfaceShader']) stringLis = maUtils.getInputNodeLisByAttr(targetAttr0) if stringLis: nodeName = stringLis[0] nodeType = maUtils._getNodeCategoryString(nodeName) if nodeType in nodeTypeLis: sourceAttr0 = maUtils._toNodeAttr([nodeName, 'outColor']) targetAttr1 = maUtils._toNodeAttr([shadingEngine, 'aiSurfaceShader']) # cmds.disconnectAttr(sourceAttr0, targetAttr0) # if not cmds.isConnected(sourceAttr0, targetAttr1): cmds.connectAttr(sourceAttr0, targetAttr1) # colorShaderNodeName = shadingEngine + 'cls_colorShader' if not cmds.objExists(colorShaderNodeName): cmds.shadingNode('blinn', n=colorShaderNodeName, asShader=True) # sourceAttr2 = maUtils._toNodeAttr([colorShaderNodeName, 'outColor']) cmds.connectAttr(sourceAttr2, targetAttr0) # if nodeType == 'aiStandardSurface': inputAttr = maUtils._toNodeAttr([nodeName, 'baseColor']) stringLis = maUtils.getInputAttrByAttr(inputAttr) if stringLis: textureNodeName = shadingEngine + 'CLS_color' # texture = texturePath + '/' + textureNodeName + '.jpg' attr = stringLis[0] cmds.convertSolidTx( attr, name=textureNodeName, resolutionX=1024, resolutionY=1024, samplePlane=1, fileImageName=texture, fileFormat='jpg' ) # cmds.connectAttr(textureNodeName + '.outColor', colorShaderNodeName + '.color')
36.878073
153
0.627916
2,740
37,505
8.543431
0.167883
0.008117
0.012303
0.013755
0.37413
0.306976
0.265112
0.238413
0.224614
0.191636
0
0.004075
0.29996
37,505
1,016
154
36.91437
0.887526
0.019731
0
0.371812
0
0.001342
0.041621
0.009613
0
0
0
0
0
0
null
null
0.001342
0.012081
null
null
0.001342
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
9302303524c2d1d2dcea444d0b37ee6e17561f6a
441
py
Python
db/Service.py
hamedsh/healthCheck
8f6b8ffffc1f1d8849a58b4966e54d30ead9556b
[ "Apache-2.0" ]
null
null
null
db/Service.py
hamedsh/healthCheck
8f6b8ffffc1f1d8849a58b4966e54d30ead9556b
[ "Apache-2.0" ]
null
null
null
db/Service.py
hamedsh/healthCheck
8f6b8ffffc1f1d8849a58b4966e54d30ead9556b
[ "Apache-2.0" ]
null
null
null
import json


class Service(object):
    id: int = None
    name: str = None
    type: int = None
    type_name: str = None
    repeat_period: int = 5  # repeat period in seconds
    metadata = {}

    def __init__(self, arr: list):
        self.id = arr[0]
        self.name = arr[1]
        self.type = arr[2]
        self.type_name = arr[3]
        self.repeat_period = arr[4]
        self.metadata = json.loads(arr[5].replace("'", '"'))
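A hypothetical row as it might come back from a database cursor (values are illustrative); the single-quoted JSON in the last column mirrors what the replace() in __init__ expects.

row = [1, "ping-backend", 2, "http", 30, "{'url': 'https://example.com/health'}"]
svc = Service(row)
print(svc.name, svc.repeat_period, svc.metadata["url"])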
25.941176
60
0.557823
61
441
3.901639
0.47541
0.151261
0.092437
0
0
0
0
0
0
0
0
0.022951
0.30839
441
16
61
27.5625
0.757377
0.052154
0
0
0
0
0.004808
0
0
0
0
0
0
1
0.066667
false
0
0.066667
0
0.6
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
9303234b67903102cb5d3258f289b230610a2cf6
801
py
Python
analyse_images.py
aalto-ui/SemanticCollage
61b4f241aef6c029a634f0d70a2a35799db46076
[ "MIT" ]
null
null
null
analyse_images.py
aalto-ui/SemanticCollage
61b4f241aef6c029a634f0d70a2a35799db46076
[ "MIT" ]
4
2021-06-08T21:23:18.000Z
2022-03-12T00:25:45.000Z
analyse_images.py
aalto-ui/SemanticCollage
61b4f241aef6c029a634f0d70a2a35799db46076
[ "MIT" ]
1
2021-04-21T03:41:32.000Z
2021-04-21T03:41:32.000Z
# encoding=utf8
from load_to_db import *

save_list = []

import random
import string


def randomString(url, stringLength=20):
    """Generate a random string of fixed length."""
    Letters = string.ascii_lowercase + string.ascii_uppercase + string.digits
    url_split = url.split(".")
    format_ = url_split[-1]
    # assumes a dot should separate the random stem from the extension
    s = ''.join(random.choice(Letters) for i in range(stringLength)) + '.' + format_
    if alreadyNameInDB(s):
        return randomString(url)
    else:
        return s


def alreadyNameInDB(name):
    connection = psycopg2.connect(settings.DATABASE_CONNECTION_STRING)
    # connection = psycopg2.connect(database='Inspiration', user='research')
    cur = connection.cursor()
    # parameterized query avoids quoting and injection issues
    cur.execute("SELECT name FROM images WHERE name = %s;", (name,))
    return cur.fetchone() is not None
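A hedged usage sketch (the URL is illustrative): the helper derives the extension from the URL and retries until the generated name is unused in the images table.

name = randomString("https://example.com/images/cat.jpg")
print(name)  # e.g. 20 random characters followed by ".jpg"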
25.83871
78
0.694132
98
801
5.561224
0.602041
0.044037
0.091743
0
0
0
0
0
0
0
0
0.009245
0.189763
801
30
79
26.7
0.830508
0.158552
0
0
1
0
0.064662
0
0
0
0
0
0
1
0.111111
false
0
0.166667
0
0.388889
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
9304861c31e91f902b6f121f6ca51fab98031cbe
3,305
py
Python
src/mod_stats_by_aircraft/background_jobs/background_job.py
FGlazov/IL2Stats_ByAircraftMod
acf66eb3f31a7e789d61a5c60d32fd30dbcedf4b
[ "MIT" ]
null
null
null
src/mod_stats_by_aircraft/background_jobs/background_job.py
FGlazov/IL2Stats_ByAircraftMod
acf66eb3f31a7e789d61a5c60d32fd30dbcedf4b
[ "MIT" ]
null
null
null
src/mod_stats_by_aircraft/background_jobs/background_job.py
FGlazov/IL2Stats_ByAircraftMod
acf66eb3f31a7e789d61a5c60d32fd30dbcedf4b
[ "MIT" ]
null
null
null
from django.core.exceptions import FieldError
from django.db import ProgrammingError
from stats.models import Tour, Sortie
from django.db.models import Max

import config

RETRO_COMPUTE_FOR_LAST_TOURS = config.get_conf()['stats'].getint('retro_compute_for_last_tours')
if RETRO_COMPUTE_FOR_LAST_TOURS is None:
    RETRO_COMPUTE_FOR_LAST_TOURS = 10


def get_tour_cutoff():
    max_id = Tour.objects.aggregate(Max('id'))['id__max']
    if max_id is None:  # Edge case: No tour yet
        return None
    return max_id - RETRO_COMPUTE_FOR_LAST_TOURS


class BackgroundJob:
    """Abstract class which represents a job to be done in the background in
    stats.cmd while there is no new mission to be processed.

    This includes fixing corrupted data due to bugs, retroactively computing
    aircraft stats, and filling in missing fields which were added in an
    update to the aircraft stats system.
    """

    def __init__(self):
        tour_cutoff = get_tour_cutoff()
        # Default first: the original left work_left undefined whenever one of
        # the exceptions below was swallowed.
        self.work_left = False
        try:
            if tour_cutoff is not None:
                self.work_left = self.query_find_sorties(tour_cutoff).count() > 0
        except FieldError:
            pass  # Likely that update.cmd is running. Otherwise this will cause another error later on.
        except ProgrammingError:
            pass  # Likely that update.cmd is running. Otherwise this will cause another error later on.

        self.unlimited_work = False  # Marker for a continuous job which always gets extra work.

    def query_find_sorties(self, tour_cutoff):
        """Finds the sorties which need to be worked on.

        @param tour_cutoff The first tour that should be searched.
        @returns A django QuerySet which will find all the Sorties which need
                 to be processed for this job.
        """
        print("[mod_stats_by_aircraft]: WARNING: Programming error, unimplemented background job query find.")
        return Sortie.objects.none()

    def compute_for_sortie(self, sortie):
        """Does the necessary computations on a single sortie found by query_find_sorties.

        @param sortie Sortie as found by query_find_sorties.
        """
        print("[mod_stats_by_aircraft]: WARNING: Programming error, unimplemented background job one sortie.")

    def log_update(self, to_compute):
        """Message which shows a status update on how many sorties are left to
        compute for this job. Printed to stats.cmd output.

        @param to_compute Nr of sorties found by query_find_sorties.
        """
        return "[mod_stats_by_aircraft]: WARNING: Programming error, unimplemented logs starting method."

    def log_done(self):
        """Message shown when this job is done. Printed to stats.cmd output."""
        return "[mod_stats_by_aircraft]: WARNING: Programming error, unimplemented logs done method."

    def reset_relevant_fields(self, tour_cutoff):
        """Optional method. This is a job done before any new sortie is processed.

        Used in corrupted-data-fixing background jobs which have to make sure
        that data is reset before a new mission is processed, so that the
        relevant fields in the new mission are at least correct.
        """
        pass
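A sketch of a concrete job, to show how the abstract hooks fit together; the Sortie field and filter used here are hypothetical, purely to illustrate the contract, not fields the mod actually defines:

class FillMissingScoresJob(BackgroundJob):
    def query_find_sorties(self, tour_cutoff):
        # Hypothetical filter: sorties in recent tours missing a 'score' field.
        return Sortie.objects.filter(tour__id__gt=tour_cutoff, score__isnull=True)

    def compute_for_sortie(self, sortie):
        sortie.score = 0  # placeholder computation
        sortie.save()

    def log_update(self, to_compute):
        return "[mod_stats_by_aircraft]: Filling missing scores, {} sorties left.".format(to_compute)

    def log_done(self):
        return "[mod_stats_by_aircraft]: Done filling missing scores."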
39.819277
120
0.69289
461
3,305
4.806941
0.347072
0.036101
0.033845
0.04287
0.304152
0.208484
0.187726
0.187726
0.187726
0.187726
0
0.001206
0.247504
3,305
82
121
40.304878
0.889827
0.334644
0
0.081081
0
0
0.235574
0.073766
0
0
0
0
0
1
0.189189
false
0.081081
0.135135
0
0.486486
0.054054
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
9306105a7d6f94715dda6856227c83b2b5fdd102
2,016
py
Python
peakinvestigator/actions/run.py
jct197/PeakInvestigator-Python-SDK
75d58d67208deff07b6e366e0b34d37570b46bf5
[ "BSD-3-Clause" ]
null
null
null
peakinvestigator/actions/run.py
jct197/PeakInvestigator-Python-SDK
75d58d67208deff07b6e366e0b34d37570b46bf5
[ "BSD-3-Clause" ]
4
2019-03-14T03:01:05.000Z
2019-10-16T20:51:03.000Z
peakinvestigator/actions/run.py
jct197/PeakInvestigator-Python-SDK
75d58d67208deff07b6e366e0b34d37570b46bf5
[ "BSD-3-Clause" ]
1
2019-01-05T03:50:02.000Z
2019-01-05T03:50:02.000Z
## -*- coding: utf-8 -*-
#
# Copyright (c) 2016, Veritomyx, Inc.
#
# This file is part of the Python SDK for PeakInvestigator
# (http://veritomyx.com) and is distributed under the terms
# of the BSD 3-Clause license.

from .base import BaseAction


class RunAction(BaseAction):
    """This class is used to make a RUN call to the PeakInvestigator API. See
    https://peakinvestigator.veritomyx.com/api/#RUN.

    It is constructed with a Fluent API because of the number of required
    arguments.
    """

    def __init__(self, version, username, password, jobID, response_time_objective):
        """Constructor"""
        super(RunAction, self).__init__(version, username, password)
        self._jobID = jobID
        self._response_time_objective = response_time_objective

    def with_files(self, *args, **kwds):
        """Specify the production and calibration data files using either
        function arguments or keywords.

        First try keywords. If those are missing, use args[0] for production
        and args[1] for calibration, if it exists.
        """
        if "production" in kwds:
            self._production = kwds["production"]
        else:
            self._production = args[0]

        if "calibration" in kwds:
            self._calibration = kwds["calibration"]
        elif len(args) == 2:
            self._calibration = args[1]

        return self

    def build_query(self):
        query = super(RunAction, self).build_query()
        query["Action"] = "RUN"
        query["Job"] = self._jobID
        query["RTO"] = self._response_time_objective
        query["InputFile"] = self._production

        if hasattr(self, "_calibration"):
            query["CalibrationFile"] = self._calibration

        return query

    @property
    def job(self):
        super(RunAction, self).precheck()
        return self._data["Job"]
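A hedged usage sketch of the fluent API above; the version, credentials, job ID, and RTO strings are placeholders rather than documented PeakInvestigator values:

action = RunAction('5.4', 'joe', 'badpw', 'P-1234', 'RTO-24')
query = action.with_files(production='production.scans.tar',
                          calibration='calibration.scans.tar').build_query()
# query now carries Action='RUN', Job='P-1234', RTO='RTO-24',
# InputFile='production.scans.tar' and CalibrationFile='calibration.scans.tar',
# on top of whatever BaseAction.build_query() contributes.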
28
80
0.590278
221
2,016
5.244344
0.457014
0.041415
0.072476
0.043141
0
0
0
0
0
0
0
0.007954
0.313988
2,016
71
81
28.394366
0.83008
0.318948
0
0
0
0
0.075353
0
0
0
0
0
0
1
0.133333
false
0.066667
0.033333
0
0.3
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
930b6911eb3b99feb00dad0f3f43696dfc1d21e0
27,385
py
Python
test_cnlunardate.py
YuBPan/cnlunardate
c19aa9821aecadea6647ac026bf233ab9f7cff90
[ "MIT" ]
null
null
null
test_cnlunardate.py
YuBPan/cnlunardate
c19aa9821aecadea6647ac026bf233ab9f7cff90
[ "MIT" ]
1
2020-01-10T05:06:27.000Z
2020-01-10T05:06:27.000Z
test_cnlunardate.py
YuBPan/cnlunardate
c19aa9821aecadea6647ac026bf233ab9f7cff90
[ "MIT" ]
null
null
null
"""Test cnlunardate.""" import unittest import pickle from cnlunardate import cnlunardate from cnlunardate import MIN_YEAR, MAX_YEAR from datetime import timedelta pickle_loads = {pickle.loads, pickle._loads} pickle_choices = [(pickle, pickle, proto) for proto in range(pickle.HIGHEST_PROTOCOL + 1)] assert len(pickle_choices) == pickle.HIGHEST_PROTOCOL + 1 class TestCnlunardateOnly(unittest.TestCase): def test_delta_non_days_ignored(self): dt = cnlunardate(2000, 1, 2) delta = timedelta(days=1) days = timedelta(delta.days) self.assertEqual(days, timedelta(1)) dt2 = dt + delta self.assertEqual(dt2, dt + days) dt2 = delta + dt self.assertEqual(dt2, dt + days) dt2 = dt - delta self.assertEqual(dt2, dt - days) delta = -delta days = timedelta(delta.days) self.assertEqual(days, timedelta(-1)) dt2 = dt + delta self.assertEqual(dt2, dt + days) dt2 = delta + dt self.assertEqual(dt2, dt + days) dt2 = dt - delta self.assertEqual(dt2, dt - days) class SubclassDate(cnlunardate): sub_var = 1 class TestCnlunardate(unittest.TestCase): theclass = cnlunardate def test_basic_attributes(self): dt = self.theclass(2017, 6, 1, True) self.assertEqual(dt.year, 2017) self.assertEqual(dt.month, 6) self.assertEqual(dt.day, 1) self.assertEqual(dt.isLeapMonth, True) def test_roundtrip(self): for dt in (self.theclass(1900, 1, 1), self.theclass.today()): # Verify dt -> string -> cnlunardate identity. s = repr(dt) self.assertTrue(s.startswith("cnlunardate.")) s = s[len("cnlunardate."):] dt2 = eval(s) self.assertEqual(dt, dt2) # Verify identity via reconstructing from pieces. dt2 = self.theclass(dt.year, dt.month, dt.day) self.assertEqual(dt, dt2) def test_ordinal_conversions(self): # Check some fixed values. for y, m, d, n in [(1900, 1, 1, 693626), # 1900, 1, 31 (1945, 10, 8, 710347), # 1945, 11, 12 (2100, 12, 1, 767009)]: # 2100, 12, 31 d = self.theclass(y, m, d) self.assertEqual(n, d.toordinal()) fromord = self.theclass.fromordinal(n) self.assertEqual(d, fromord) # Check first and last days of year spottily across the whole # range of years supported. for year in range(MIN_YEAR+1, MAX_YEAR+1, 7): # Verify (year, 1, 1) -> ordinal -> y, m, d, l is identity. d = self.theclass(year, 1, 1) n = d.toordinal() d2 = self.theclass.fromordinal(n) self.assertEqual(d, d2) # Verify that moving back a day gets to the end of year-1. if year > 1: d = self.theclass.fromordinal(n-1) try: d2 = self.theclass(year-1, 12, 30) except ValueError: d2 = self.theclass(year-1, 12, 29) self.assertEqual(d, d2) self.assertEqual(d2.toordinal(), n-1) # Test every day in a year with and without leap month. 
for year, dim, hasLeapMonth, leapMonth, leapMonthDays in \ (2017, [29, 30, 29, 30, 29, 29, 29, 30, 29, 30, 30, 30], True, 6, 30), \ (2018, [29, 30, 29, 30, 29, 29, 30, 29, 30, 29, 30, 30], False, -1, -1): n = self.theclass(year, 1, 1).toordinal() for month, maxday in zip(range(1, len(dim)+1), dim): for day in range(1, maxday+1): d = self.theclass(year, month, day) self.assertEqual(d.toordinal(), n) self.assertEqual(d, self.theclass.fromordinal(n)) n += 1 if hasLeapMonth and month == leapMonth: for day in range(1, leapMonthDays+1): d = self.theclass(year, month, day, True) self.assertEqual(d.toordinal(), n) self.assertEqual(d, self.theclass.fromordinal(n)) n += 1 def test_extreme_ordinals(self): a = self.theclass.min a = self.theclass(a.year, a.month, a.day) aord = a.toordinal() b = a.fromordinal(aord) self.assertEqual(a, b) self.assertRaises(ValueError, lambda: a.fromordinal(aord - 1)) b = a + timedelta(days=1) self.assertEqual(b.toordinal(), aord + 1) self.assertEqual(b, self.theclass.fromordinal(aord + 1)) a = self.theclass.max a = self.theclass(a.year, a.month, a.day) aord = a.toordinal() b = a.fromordinal(aord) self.assertEqual(a, b) self.assertRaises(ValueError, lambda: a.fromordinal(aord + 1)) b = a - timedelta(days=1) self.assertEqual(b.toordinal(), aord - 1) self.assertEqual(b, self.theclass.fromordinal(aord - 1)) def test_bad_constructor_arguments(self): # missing arguments self.assertRaises(TypeError, self.theclass) self.assertRaises(TypeError, self.theclass, MIN_YEAR) self.assertRaises(TypeError, self.theclass, MIN_YEAR, 1) # bad years self.theclass(MIN_YEAR, 1, 1) self.theclass(MAX_YEAR, 1, 1) self.assertRaises(ValueError, self.theclass, MIN_YEAR-1, 1, 1) self.assertRaises(ValueError, self.theclass, MAX_YEAR+1, 1, 1) # bad months self.theclass(2017, 1, 1) self.theclass(2017, 12, 1) self.assertRaises(ValueError, self.theclass, 2017, 0, 1) self.assertRaises(ValueError, self.theclass, 2017, 13, 1) # bad days self.theclass(2017, 1, 29) self.theclass(2017, 6, 29) self.theclass(2017, 6, 30, True) self.assertRaises(ValueError, self.theclass, 2017, 1, 0) self.assertRaises(ValueError, self.theclass, 2017, 1, 30) self.assertRaises(ValueError, self.theclass, 2017, 6, 30) self.assertRaises(ValueError, self.theclass, 2017, 6, 31, True) # bad isLeapMonth self.theclass(2017, 1, 1) self.assertRaises(ValueError, self.theclass, 2017, 1, 1, True) self.assertRaises(ValueError, self.theclass, 2017, 6, 30) # min and max self.theclass(MIN_YEAR, 1, 1) self.theclass(MAX_YEAR, 12, 1) self.assertRaises(ValueError, self.theclass, MIN_YEAR-1, 12, 30) self.assertRaises(ValueError, self.theclass, MAX_YEAR, 12, 2) def test_bad_constructor_arguments_typeerror(self): # non-expected arguments self.assertRaises(TypeError, self.theclass, 2017.0, 1, 1) self.assertRaises(TypeError, self.theclass, 2017, 1.0, 1) self.assertRaises(TypeError, self.theclass, 2017, 1, 1.0) self.assertRaises(TypeError, self.theclass, 2017, 1, 1, "non-bool type") # int __index__ class IntIndex: def __init__(self, i): self.i = i def __index__(self): return self.i self.theclass(IntIndex(2017), 1, 1) self.theclass(2017, IntIndex(1), 1) self.theclass(2017, 1, IntIndex(1)) # non-int __index__ class NonIntIndex: def __index__(self): return 1.0 arg = NonIntIndex() self.assertRaises(TypeError, self.theclass, arg, 1, 1) self.assertRaises(TypeError, self.theclass, 1, arg, 1) self.assertRaises(TypeError, self.theclass, 1, 1, arg) # int __int__ class IntInt: def __init__(self, i): self.i = i def __int__(self): return self.i self.theclass(IntInt(2017), 
1, 1) self.theclass(2017, IntInt(1), 1) self.theclass(2017, 1, IntInt(1)) # non-int __int__ class NonIntInt: def __int__(self): return 1.0 arg = NonIntInt() self.assertRaises(TypeError, self.theclass, arg, 1, 1) self.assertRaises(TypeError, self.theclass, 1, arg, 1) self.assertRaises(TypeError, self.theclass, 1, 1, arg) # bool __bool__ class BoolBool: def __bool__(self): return True self.theclass(2017, 6, 1, BoolBool()) # non-bool __bool__ class NonBoolBool: def __bool__(self): return 1.0 arg = NonBoolBool() self.assertRaises(TypeError, self.theclass, 2017, 6, 1, arg) def test_hash_equality(self): d = self.theclass(2017, 1, 1) # same thing e = self.theclass(2017, 1, 1) self.assertEqual(d, e) self.assertEqual(hash(d), hash(e)) dic = {d: 1} dic[e] = 2 self.assertEqual(len(dic), 1) self.assertEqual(dic[d], 2) self.assertEqual(dic[e], 2) d = self.theclass(2017, 6, 30, True) # same thing e = self.theclass(2017, 6, 30, True) self.assertEqual(d, e) self.assertEqual(hash(d), hash(e)) dic = {d: 1} dic[e] = 2 self.assertEqual(len(dic), 1) self.assertEqual(dic[d], 2) self.assertEqual(dic[e], 2) # different on isLeapMonth self.assertNotEqual(hash(self.theclass(2017, 6, 29)), hash(self.theclass(2017, 6, 29, True))) def test_computations(self): a = self.theclass(2002, 1, 30) b = self.theclass(1956, 1, 29) c = self.theclass(2001, 2, 1) diff = a-b self.assertEqual(diff.days, 16803) self.assertEqual(diff.seconds, 0) self.assertEqual(diff.microseconds, 0) day = timedelta(1) week = timedelta(7) a = self.theclass(2002, 3, 2) self.assertEqual(a + day, self.theclass(2002, 3, 3)) self.assertEqual(day + a, self.theclass(2002, 3, 3)) self.assertEqual(a - day, self.theclass(2002, 3, 1)) self.assertEqual(-day + a, self.theclass(2002, 3, 1)) self.assertEqual(a + week, self.theclass(2002, 3, 9)) self.assertEqual(a - week, self.theclass(2002, 2, 25)) self.assertEqual(a + 52*week, self.theclass(2003, 3, 12)) self.assertEqual(a - 52*week, self.theclass(2001, 3, 22)) self.assertEqual((a + week) - a, week) self.assertEqual((a + day) - a, day) self.assertEqual((a - week) - a, -week) self.assertEqual((a - day) - a, -day) self.assertEqual(a - (a + week), -week) self.assertEqual(a - (a + day), -day) self.assertEqual(a - (a - week), week) self.assertEqual(a - (a - day), day) self.assertEqual(c - (c - day), day) a = self.theclass(2017, 6, 2, True) self.assertEqual(a + day, self.theclass(2017, 6, 3, True)) self.assertEqual(day + a, self.theclass(2017, 6, 3, True)) self.assertEqual(a - day, self.theclass(2017, 6, 1, True)) self.assertEqual(-day + a, self.theclass(2017, 6, 1, True)) self.assertEqual(a + week, self.theclass(2017, 6, 9, True)) self.assertEqual(a - week, self.theclass(2017, 6, 24)) self.assertEqual(a + 52*week, self.theclass(2018, 6, 11)) self.assertEqual(a - 52*week, self.theclass(2016, 6, 22)) self.assertEqual((a + week) - a, week) self.assertEqual((a + day) - a, day) self.assertEqual((a - week) - a, -week) self.assertEqual((a - day) - a, -day) self.assertEqual(a - (a + week), -week) self.assertEqual(a - (a + day), -day) self.assertEqual(a - (a - week), week) self.assertEqual(a - (a - day), day) self.assertEqual(c - (c - day), day) # Add/sub ints or floats should be illegal for i in 1, 1.0: self.assertRaises(TypeError, lambda: a+i) self.assertRaises(TypeError, lambda: a-i) self.assertRaises(TypeError, lambda: i+a) self.assertRaises(TypeError, lambda: i-a) # delta - cnlunardate is senseless. 
self.assertRaises(TypeError, lambda: day - a) # mixing cnlunardate and (delta or cnlunardate) via * or // is senseless self.assertRaises(TypeError, lambda: day * a) self.assertRaises(TypeError, lambda: a * day) self.assertRaises(TypeError, lambda: day // a) self.assertRaises(TypeError, lambda: a // day) self.assertRaises(TypeError, lambda: a * a) self.assertRaises(TypeError, lambda: a // a) # cnlunardate + cnlunardate is senseless self.assertRaises(TypeError, lambda: a + a) def test_overflow(self): tiny = self.theclass.resolution for delta in [tiny, timedelta(1), timedelta(2)]: dt = self.theclass.min + delta dt -= delta # no problem self.assertRaises(OverflowError, dt.__sub__, delta) self.assertRaises(OverflowError, dt.__add__, -delta) dt = self.theclass.max - delta dt += delta # no problem self.assertRaises(OverflowError, dt.__add__, delta) self.assertRaises(OverflowError, dt.__sub__, -delta) def test_fromtimestamp(self): import time # Try an arbitrary fixed value. ts = time.mktime((1999, 9, 19, 0, 0, 0, 0, 0, -1)) d = self.theclass.fromtimestamp(ts) self.assertEqual(d.year, 1999) self.assertEqual(d.month, 8) self.assertEqual(d.day, 10) self.assertEqual(d.isLeapMonth, False) def test_insane_fromtimestamp(self): # It's possible that some platform maps time_t to double, # and that this test will fail there. This test should # exempt such platforms (provided they return reasonable # results!). for insane in -1e200, 1e200: self.assertRaises(OverflowError, self.theclass.fromtimestamp, insane) def test_today(self): import time # We claim that today() is like fromtimestamp(time.time()), so # prove it. for dummy in range(3): today = self.theclass.today() ts = time.time() todayagain = self.theclass.fromtimestamp(ts) if today == todayagain: break # There are several legit reasons that could fail: # 1. It recently became midnight, between the today() and the # time() calls. # 2. The platform time() has such fine resolution that we'll # never get the same value twice. # 3. The platform time() has poor resolution, and we just # happened to call today() right before a resolution quantum # boundary. # 4. The system clock got fiddled between calls. # In any case, wait a little while and try again. time.sleep(0.1) # It worked or it didn't. If it didn't, assume it's reason #2, and # let the test pass if they're within half a second of each other. 
if today != todayagain: self.assertAlmostEqual(todayagain, today, delta=timedelta(seconds=0.5)) def test_weekday(self): for i in range(7): # 2017, 6, 2 is a Monday self.assertEqual(self.theclass(2017, 6, 2+i, True).weekday(), i) self.assertEqual(self.theclass( 2017, 6, 2+i, True).isoweekday(), i+1) # 2017, 1, 3 is a Monday self.assertEqual(self.theclass(2017, 1, 3+i).weekday(), i) self.assertEqual(self.theclass(2017, 1, 3+i).isoweekday(), i+1) def test_isocalendar(self): # Check examples from # http://www.phys.uu.nl/~vgent/calendar/isocalendar.htm for i in range(7): d = self.theclass(2003, 11, 22+i) self.assertEqual(d.isocalendar(), (2003, 51, i+1)) d = self.theclass(2003, 11, 29) + timedelta(i) self.assertEqual(d.isocalendar(), (2003, 52, i+1)) d = self.theclass(2003, 12, 7+i) self.assertEqual(d.isocalendar(), (2004, 1, i+1)) d = self.theclass(2003, 12, 14+i) self.assertEqual(d.isocalendar(), (2004, 2, i+1)) d = self.theclass(2009, 11, 6+i) self.assertEqual(d.isocalendar(), (2009, 52, i+1)) d = self.theclass(2009, 11, 13+i) self.assertEqual(d.isocalendar(), (2009, 53, i+1)) d = self.theclass(2009, 11, 20+i) self.assertEqual(d.isocalendar(), (2010, 1, i+1)) def test_iso_long_years(self): from datetime import date # Calculate long ISO years and compare to table from # http://www.phys.uu.nl/~vgent/calendar/isocalendar.htm ISO_LONG_YEARS_TABLE = """ 4 32 60 88 9 37 65 93 15 43 71 99 20 48 76 26 54 82 303 331 359 387 308 336 364 392 314 342 370 398 320 348 376 325 353 381 """ iso_long_years = sorted(map(int, ISO_LONG_YEARS_TABLE.split())) L = [] for i in range(101): d = self.theclass.fromsolardate(date(MIN_YEAR+i, 12, 31)) if d.isocalendar()[1] == 53: L.append(i + MIN_YEAR - 1600) d = self.theclass.fromsolardate(date(2000+i, 12, 31)) if d.isocalendar()[1] == 53: L.append(i) self.assertEqual(sorted(L), iso_long_years) def test_resolution_info(self): self.assertIsInstance(self.theclass.min, cnlunardate) self.assertIsInstance(self.theclass.max, cnlunardate) self.assertIsInstance(self.theclass.resolution, timedelta) self.assertTrue(self.theclass.max > self.theclass.min) def test_extreme_timedelta(self): big = self.theclass.max - self.theclass.min n = (big.days*24*3600 + big.seconds)*1000000 + big.microseconds justasbig = timedelta(0, 0, n) self.assertEqual(big, justasbig) self.assertEqual(self.theclass.min + big, self.theclass.max) self.assertEqual(self.theclass.max - big, self.theclass.min) def test_from_to_solardate(self): from datetime import date for y1, m1, d1, y2, m2, d2 in [ (1900, 1, 1, 1900, 1, 31), (1945, 10, 8, 1945, 11, 12), (2100, 12, 1, 2100, 12, 31)]: d = self.theclass(y1, m1, d1) solar = date(y2, m2, d2) fromsolar = self.theclass.fromsolardate(solar) self.assertEqual(d, fromsolar) tosolar = self.theclass.tosolardate(d) self.assertEqual(solar, tosolar) def test_timetuple(self): for i in range(7): # January 2, 1956 is a Monday (0) d = self.theclass(1955, 11, 20+i) t = d.timetuple() self.assertEqual(t, (1956, 1, 2+i, 0, 0, 0, i, 2+i, -1)) # February 1, 1956 is a Wednesday (2) d = self.theclass(1955, 12, 20+i) t = d.timetuple() self.assertEqual(t, (1956, 2, 1+i, 0, 0, 0, (2+i) % 7, 32+i, -1)) # March 1, 1956 is a Thursday (3), and is the 31+29+1 = 61st day # of the year. 
d = self.theclass(1956, 1, 19+i) t = d.timetuple() self.assertEqual(t, (1956, 3, 1+i, 0, 0, 0, (3+i) % 7, 61+i, -1)) self.assertEqual(t.tm_year, 1956) self.assertEqual(t.tm_mon, 3) self.assertEqual(t.tm_mday, 1+i) self.assertEqual(t.tm_hour, 0) self.assertEqual(t.tm_min, 0) self.assertEqual(t.tm_sec, 0) self.assertEqual(t.tm_wday, (3+i) % 7) self.assertEqual(t.tm_yday, 61+i) self.assertEqual(t.tm_isdst, -1) def test_pickling(self): args = 2015, 11, 27 orig = self.theclass(*args) for pickler, unpickler, proto in pickle_choices: green = pickler.dumps(orig, proto) derived = unpickler.loads(green) self.assertEqual(orig, derived) self.assertEqual(orig.__reduce__(), orig.__reduce_ex__(2)) def test_compat_unpickle(self): tests = [ b"ccnlunardate\ncnlunardate\n(S'\\x07\\xdf\\x0b\\x1b\\x00'\ntR.", b"ccnlunardate\ncnlunardate\n(U\x05\x07\xdf\x0b\x1b\x00tR.", b"\x80\x02ccnlunardate\ncnlunardate\nU\x05\x07\xdf\x0b\x1b\x00\x85R.", ] args = 2015, 11, 27 expected = self.theclass(*args) for data in tests: for loads in pickle_loads: derived = loads(data, encoding="latin1") self.assertEqual(derived, expected) def test_compare(self): t1 = self.theclass(2017, 6, 4) t2 = self.theclass(2017, 6, 4) self.assertEqual(t1, t2) self.assertTrue(t1 <= t2) self.assertTrue(t1 >= t2) self.assertFalse(t1 != t2) self.assertFalse(t1 < t2) self.assertFalse(t1 > t2) for args in (2018, 6, 3), (2017, 6, 4, True), (2017, 7, 4), (2017, 6, 5): t2 = self.theclass(*args) # this is larger than t1 self.assertTrue(t1 < t2) self.assertTrue(t2 > t1) self.assertTrue(t1 <= t2) self.assertTrue(t2 >= t1) self.assertTrue(t1 != t2) self.assertTrue(t2 != t1) self.assertFalse(t1 == t2) self.assertFalse(t2 == t1) self.assertFalse(t1 > t2) self.assertFalse(t2 < t1) self.assertFalse(t1 >= t2) self.assertFalse(t2 <= t1) for badarg in (10, 34.5, "abc", {}, [], ()): self.assertEqual(t1 == badarg, False) self.assertEqual(t1 != badarg, True) self.assertEqual(badarg == t1, False) self.assertEqual(badarg != t1, True) self.assertRaises(TypeError, lambda: t1 < badarg) self.assertRaises(TypeError, lambda: t1 > badarg) self.assertRaises(TypeError, lambda: t1 >= badarg) self.assertRaises(TypeError, lambda: badarg <= t1) self.assertRaises(TypeError, lambda: badarg < t1) self.assertRaises(TypeError, lambda: badarg > t1) self.assertRaises(TypeError, lambda: badarg >= t1) def test_mixed_compare(self): our = self.theclass(2000, 4, 5) # Our class can be compared for equality to other classes self.assertEqual(our == 1, False) self.assertEqual(1 == our, False) self.assertEqual(our != 1, True) self.assertEqual(1 != our, True) # But the ordering is undefined self.assertRaises(TypeError, lambda: our < 1) self.assertRaises(TypeError, lambda: 1 < our) # Repeat those tests with a different class class SomeClass: pass their = SomeClass() self.assertEqual(our == their, False) self.assertEqual(their == our, False) self.assertEqual(our != their, True) self.assertEqual(their != our, True) self.assertRaises(TypeError, lambda: our < their) self.assertRaises(TypeError, lambda: their < our) def test_bool(self): # All cnlunardates are considered true. 
self.assertTrue(self.theclass.min) self.assertTrue(self.theclass.max) def test_replace(self): cls = self.theclass args = [2017, 6, 5, False] base = cls(*args) self.assertEqual(base, base.replace()) i = 0 for name, newval in (("year", 2018), ("month", 7), ("day", 6), ("isLeapMonth", True)): newargs = args[:] newargs[i] = newval expected = cls(*newargs) got = base.replace(**{name: newval}) self.assertEqual(expected, got) i += 1 base = cls(2016, 1, 30) # Day is out of bounds. self.assertRaises(ValueError, base.replace, year=2017) # IsLeapMonth is wrong. self.assertRaises(ValueError, base.replace, isLeapMonth=True) def test_subclass_replace(self): class DateSubclass(self.theclass): pass dt = DateSubclass(2012, 1, 1) self.assertIs(type(dt.replace(year=2013)), DateSubclass) def test_subclass_cnlunardate(self): class C(self.theclass): theAnswer = 42 def __new__(cls, *args, **kws): temp = kws.copy() extra = temp.pop("extra") result = self.theclass.__new__(cls, *args, **temp) result.extra = extra return result def newmeth(self, start): return start + self.year + self.month args = 2003, 4, 14 dt1 = self.theclass(*args) dt2 = C(*args, **{"extra": 7}) self.assertEqual(dt2.__class__, C) self.assertEqual(dt2.theAnswer, 42) self.assertEqual(dt2.extra, 7) self.assertEqual(dt1.toordinal(), dt2.toordinal()) self.assertEqual(dt2.newmeth(-7), dt1.year + dt1.month - 7) def test_subclass_alternate_constructors(self): from datetime import datetime, date, time # Test that alternate constructors call the constructor class DateSubclass(self.theclass): def __new__(cls, *args, **kwargs): result = self.theclass.__new__(cls, *args, **kwargs) result.extra = 7 return result args = (2003, 3, 13) d_date = date(2003, 4, 14) # Equivalent solar date d_ord = 731319 # Equivalent ordinal base_d = DateSubclass(*args) self.assertIsInstance(base_d, DateSubclass) self.assertEqual(base_d.extra, 7) # Timestamp depends on time zone, so we'll calculate the equivalent here ts = datetime.combine(d_date, time(0)).timestamp() test_cases = [ ("fromsolardate", (d_date,)), ("fromordinal", (d_ord,)), ("fromtimestamp", (ts,)), ] for constr_name, constr_args in test_cases: for base_obj in (DateSubclass, base_d): # Test both the classmethod and method with self.subTest(base_obj_type=type(base_obj), constr_name=constr_name): constr = getattr(base_obj, constr_name) dt = constr(*constr_args) # Test that it creates the right subclass self.assertIsInstance(dt, DateSubclass) # Test that it's equal to the base object self.assertEqual(dt, base_d) # Test that it called the constructor self.assertEqual(dt.extra, 7) def test_pickling_subclass_date(self): args = 2006, 7, 23 orig = SubclassDate(*args) for pickler, unpickler, proto in pickle_choices: green = pickler.dumps(orig, proto) derived = unpickler.loads(green) self.assertEqual(orig, derived)
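A short round-trip sketch of the API these tests exercise, using lunar/solar pairs taken from test_from_to_solardate and the leap-month constructor from test_basic_attributes:

from datetime import date
from cnlunardate import cnlunardate

d = cnlunardate(1945, 10, 8)                          # lunar year, month, day
assert cnlunardate.tosolardate(d) == date(1945, 11, 12)
assert cnlunardate.fromsolardate(date(1945, 11, 12)) == d
leap = cnlunardate(2017, 6, 1, True)                  # fourth arg: isLeapMonth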
38.843972
88
0.560964
3,371
27,385
4.489766
0.144764
0.114172
0.046515
0.025834
0.503601
0.429865
0.353816
0.286554
0.242484
0.203898
0
0.071671
0.320833
27,385
704
89
38.899148
0.742083
0.096695
0
0.230337
0
0.001873
0.024296
0.007423
0
0
0
0
0.419476
1
0.076779
false
0.003745
0.018727
0.013109
0.140449
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
1
931a1eefc8b03639fa3b9346aaf0553aa1293c86
2,685
py
Python
webspider/utils/log.py
chem2099/webspider
5577878b69d85a4b75ece6701f6018767d0ee4bc
[ "MIT" ]
256
2017-06-01T10:56:41.000Z
2019-06-28T17:26:08.000Z
webspider/utils/log.py
chem2099/webspider
5577878b69d85a4b75ece6701f6018767d0ee4bc
[ "MIT" ]
14
2017-10-30T14:32:08.000Z
2019-05-27T09:49:39.000Z
webspider/utils/log.py
chem2099/webspider
5577878b69d85a4b75ece6701f6018767d0ee4bc
[ "MIT" ]
88
2017-09-01T08:06:48.000Z
2019-06-26T06:23:04.000Z
# coding: utf-8
import os
import logging.config

from webspider import setting

LOG_FILE_PATH = os.path.join(setting.BASE_DIR, 'log', 'spider_log.txt')

LOGGING_CONFIG = {
    'version': 1,
    'disable_existing_loggers': True,
    'formatters': {
        'default': {
            'format': '%(asctime)s- %(module)s:%(lineno)d [%(levelname)1.1s] %(name)s: %(message)s',
            'datefmt': '%Y/%m/%d %H:%M:%S'
        },
    },
    'handlers': {
        'console': {
            'level': 'DEBUG',
            'formatter': 'default',
            'class': 'logging.StreamHandler'
        },
        'smtp': {
            'level': 'ERROR',
            'class': 'logging.handlers.SMTPHandler',
            'formatter': 'default',
            'mailhost': (setting.SMTP_CONF['host'], setting.SMTP_CONF['port']),
            'fromaddr': setting.SMTP_CONF['from_email'],
            'toaddrs': [setting.SMTP_CONF['to_email'], ],
            'subject': '爬虫系统出现异常',  # "An exception occurred in the spider system"
            'credentials': (setting.MAIL_CONF['username'], setting.MAIL_CONF['password'])
        },
        'file': {
            'level': 'ERROR',
            'formatter': 'default',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': LOG_FILE_PATH,
            'encoding': 'utf8'
        },
    },
    'loggers': {
        '': {
            'handlers': ['console', 'file'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'webspider': {
            'handlers': ['console', 'file'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'tornado': {
            'handlers': ['console', 'file'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'tornado.access': {
            'handlers': ['console', 'file'],
            'level': 'INFO',
            'propagate': False,
        },
        'tornado.application': {
            'handlers': ['console', 'file'],
            'level': 'INFO',
            'propagate': False,
        },
        'tornado.general': {
            'handlers': ['console', 'file'],
            'propagate': False,
            'level': 'INFO',
        },
        'sqlalchemy.engine': {
            'handlers': ['console', 'file'],
            'level': 'INFO',
            'propagate': False,
        },
        'gunicorn': {
            'handlers': ['console', 'file'],
            'level': 'INFO',
            'propagate': False,
        },
        'celery': {
            'handlers': ['console', 'file'],
            'level': 'DEBUG',
            'propagate': False,
        },
    },
}


def config_logging():
    """Configure logging."""
    logging.config.dictConfig(LOGGING_CONFIG)
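A minimal usage sketch, assuming the webspider package and its setting module are importable; it applies the dictConfig above and emits through one of the configured loggers:

import logging
from webspider.utils.log import config_logging

config_logging()
logging.getLogger('webspider').debug('logging configured')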
27.397959
100
0.448417
206
2,685
5.752427
0.398058
0.126582
0.144304
0.162025
0.310549
0.310549
0.310549
0.167089
0
0
0
0.002931
0.364618
2,685
97
101
27.680412
0.691676
0.007076
0
0.37931
0
0.011494
0.346241
0.048872
0
0
0
0
0
1
0.011494
false
0.011494
0.034483
0
0.045977
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
931abc49c69a201ff08a4c4d4963b916bcd4009f
53,851
py
Python
manila/tests/share/drivers/hitachi/hnas/test_driver.py
kpawar89/manila
d487c2db728cedf8357b9f4acbc0a45c21c3a83e
[ "Apache-2.0" ]
1
2020-06-17T13:20:21.000Z
2020-06-17T13:20:21.000Z
manila/tests/share/drivers/hitachi/hnas/test_driver.py
viroel/manila
fbcabd2c03985000bd9b4d4d9a4478bc0b784efa
[ "Apache-2.0" ]
null
null
null
manila/tests/share/drivers/hitachi/hnas/test_driver.py
viroel/manila
fbcabd2c03985000bd9b4d4d9a4478bc0b784efa
[ "Apache-2.0" ]
1
2021-02-23T05:52:11.000Z
2021-02-23T05:52:11.000Z
# Copyright (c) 2015 Hitachi Data Systems, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from unittest import mock import ddt from oslo_config import cfg from manila import exception import manila.share.configuration import manila.share.driver from manila.share.drivers.hitachi.hnas import driver from manila.share.drivers.hitachi.hnas import ssh from manila import test CONF = cfg.CONF share_nfs = { 'id': 'aa4a7710-f326-41fb-ad18-b4ad587fc87a', 'name': 'aa4a7710-f326-41fb-ad18-b4ad587fc87a', 'size': 50, 'host': 'hnas', 'share_proto': 'NFS', 'share_type_id': 1, 'share_network_id': 'bb329e24-3bdb-491d-acfd-dfe70c09b98d', 'share_server_id': 'cc345a53-491d-acfd-3bdb-dfe70c09b98d', 'export_locations': [{'path': '172.24.44.10:/shares/' 'aa4a7710-f326-41fb-ad18-b4ad587fc87a'}], } share_cifs = { 'id': 'f5cadaf2-afbe-4cc4-9021-85491b6b76f7', 'name': 'f5cadaf2-afbe-4cc4-9021-85491b6b76f7', 'size': 50, 'host': 'hnas', 'share_proto': 'CIFS', 'share_type_id': 1, 'share_network_id': 'bb329e24-3bdb-491d-acfd-dfe70c09b98d', 'share_server_id': 'cc345a53-491d-acfd-3bdb-dfe70c09b98d', 'export_locations': [{'path': '\\\\172.24.44.10\\' 'f5cadaf2-afbe-4cc4-9021-85491b6b76f7'}], } share_invalid_host = { 'id': 'aa4a7710-f326-41fb-ad18-b4ad587fc87a', 'name': 'aa4a7710-f326-41fb-ad18-b4ad587fc87a', 'size': 50, 'host': 'invalid', 'share_proto': 'NFS', 'share_type_id': 1, 'share_network_id': 'bb329e24-3bdb-491d-acfd-dfe70c09b98d', 'share_server_id': 'cc345a53-491d-acfd-3bdb-dfe70c09b98d', 'export_locations': [{'path': '172.24.44.10:/shares/' 'aa4a7710-f326-41fb-ad18-b4ad587fc87a'}], } share_mount_support_nfs = { 'id': '62125744-fcdd-4f55-a8c1-d1498102f634', 'name': '62125744-fcdd-4f55-a8c1-d1498102f634', 'size': 50, 'host': 'hnas', 'share_proto': 'NFS', 'share_type_id': 1, 'share_network_id': 'bb329e24-3bdb-491d-acfd-dfe70c09b98d', 'share_server_id': 'cc345a53-491d-acfd-3bdb-dfe70c09b98d', 'export_locations': [{'path': '172.24.44.10:/shares/' '62125744-fcdd-4f55-a8c1-d1498102f634'}], 'mount_snapshot_support': True, } share_mount_support_cifs = { 'id': 'd6e7dc6b-f65f-49d9-968d-936f75474f29', 'name': 'd6e7dc6b-f65f-49d9-968d-936f75474f29', 'size': 50, 'host': 'hnas', 'share_proto': 'CIFS', 'share_type_id': 1, 'share_network_id': 'bb329e24-3bdb-491d-acfd-dfe70c09b98d', 'share_server_id': 'cc345a53-491d-acfd-3bdb-dfe70c09b98d', 'export_locations': [{'path': '172.24.44.10:/shares/' 'd6e7dc6b-f65f-49d9-968d-936f75474f29'}], 'mount_snapshot_support': True, } access_nfs_rw = { 'id': 'acdc7172b-fe07-46c4-b78f-df3e0324ccd0', 'access_type': 'ip', 'access_to': '172.24.44.200', 'access_level': 'rw', 'state': 'active', } access_cifs_rw = { 'id': '43167594-40e9-b899-1f4f-b9c2176b7564', 'access_type': 'user', 'access_to': 'fake_user', 'access_level': 'rw', 'state': 'active', } access_cifs_ro = { 'id': '32407088-1f4f-40e9-b899-b9a4176b574d', 'access_type': 'user', 'access_to': 'fake_user', 'access_level': 'ro', 'state': 'active', } snapshot_nfs = { 'id': 'abba6d9b-f29c-4bf7-aac1-618cda7aaf0f', 'share_id': 
'aa4a7710-f326-41fb-ad18-b4ad587fc87a', 'share': share_nfs, 'provider_location': '/snapshots/aa4a7710-f326-41fb-ad18-b4ad587fc87a/' 'abba6d9b-f29c-4bf7-aac1-618cda7aaf0f', 'size': 2, } snapshot_cifs = { 'id': '91bc6e1b-1ba5-f29c-abc1-da7618cabf0a', 'share_id': 'f5cadaf2-afbe-4cc4-9021-85491b6b76f7', 'share': share_cifs, 'provider_location': '/snapshots/f5cadaf2-afbe-4cc4-9021-85491b6b76f7/' '91bc6e1b-1ba5-f29c-abc1-da7618cabf0a', 'size': 2, } manage_snapshot = { 'id': 'bc168eb-fa71-beef-153a-3d451aa1351f', 'share_id': 'aa4a7710-f326-41fb-ad18-b4ad587fc87a', 'share': share_nfs, 'provider_location': '/snapshots/aa4a7710-f326-41fb-ad18-b4ad587fc87a' '/snapshot18-05-2106', } snapshot_mount_support_nfs = { 'id': '3377b015-a695-4a5a-8aa5-9b931b023380', 'share_id': '62125744-fcdd-4f55-a8c1-d1498102f634', 'share': share_mount_support_nfs, 'provider_location': '/snapshots/62125744-fcdd-4f55-a8c1-d1498102f634' '/3377b015-a695-4a5a-8aa5-9b931b023380', } snapshot_mount_support_cifs = { 'id': 'f9916515-5cb8-4612-afa6-7f2baa74223a', 'share_id': 'd6e7dc6b-f65f-49d9-968d-936f75474f29', 'share': share_mount_support_cifs, 'provider_location': '/snapshots/d6e7dc6b-f65f-49d9-968d-936f75474f29' '/f9916515-5cb8-4612-afa6-7f2baa74223a', } invalid_share = { 'id': 'aa4a7710-f326-41fb-ad18-b4ad587fc87a', 'name': 'aa4a7710-f326-41fb-ad18-b4ad587fc87a', 'size': 100, 'host': 'hnas', 'share_proto': 'HDFS', } invalid_snapshot = { 'id': '24dcdcb5-a582-4bcc-b462-641da143afee', 'share_id': 'aa4a7710-f326-41fb-ad18-b4ad587fc87a', 'share': invalid_share, } invalid_access_type = { 'id': 'acdc7172b-fe07-46c4-b78f-df3e0324ccd0', 'access_type': 'cert', 'access_to': 'manila_user', 'access_level': 'rw', 'state': 'active', } invalid_access_level = { 'id': 'acdc7172b-fe07-46c4-b78f-df3e0324ccd0', 'access_type': 'ip', 'access_to': 'manila_user', 'access_level': '777', 'state': 'active', } invalid_protocol_msg = ("Share backend error: Only NFS or CIFS protocol are " "currently supported. Share provided %(id)s with " "protocol %(proto)s." % {'id': invalid_share['id'], 'proto': invalid_share['share_proto']}) @ddt.ddt class HitachiHNASTestCase(test.TestCase): def setUp(self): super(HitachiHNASTestCase, self).setUp() CONF.set_default('driver_handles_share_servers', False) CONF.hitachi_hnas_evs_id = '2' CONF.hitachi_hnas_evs_ip = '172.24.44.10' CONF.hitachi_hnas_admin_network_ip = '10.20.30.40' CONF.hitachi_hnas_ip = '172.24.44.1' CONF.hitachi_hnas_ip_port = 'hitachi_hnas_ip_port' CONF.hitachi_hnas_user = 'hitachi_hnas_user' CONF.hitachi_hnas_password = 'hitachi_hnas_password' CONF.hitachi_hnas_file_system_name = 'file_system' CONF.hitachi_hnas_ssh_private_key = 'private_key' CONF.hitachi_hnas_cluster_admin_ip0 = None CONF.hitachi_hnas_stalled_job_timeout = 10 CONF.hitachi_hnas_driver_helper = ('manila.share.drivers.hitachi.hnas.' 
'ssh.HNASSSHBackend') self.fake_conf = manila.share.configuration.Configuration(None) self.fake_private_storage = mock.Mock() self.mock_object(self.fake_private_storage, 'get', mock.Mock(return_value=None)) self.mock_object(self.fake_private_storage, 'delete', mock.Mock(return_value=None)) self._driver = driver.HitachiHNASDriver( private_storage=self.fake_private_storage, configuration=self.fake_conf) self._driver.backend_name = "hnas" self.mock_log = self.mock_object(driver, 'LOG') # mocking common backend calls self.mock_object(ssh.HNASSSHBackend, "check_fs_mounted", mock.Mock( return_value=True)) self.mock_object(ssh.HNASSSHBackend, "check_vvol") self.mock_object(ssh.HNASSSHBackend, "check_quota") self.mock_object(ssh.HNASSSHBackend, "check_cifs") self.mock_object(ssh.HNASSSHBackend, "check_export") self.mock_object(ssh.HNASSSHBackend, 'check_directory') @ddt.data('hitachi_hnas_driver_helper', 'hitachi_hnas_evs_id', 'hitachi_hnas_evs_ip', 'hitachi_hnas_ip', 'hitachi_hnas_user') def test_init_invalid_conf_parameters(self, attr_name): self.mock_object(manila.share.driver.ShareDriver, '__init__') setattr(CONF, attr_name, None) self.assertRaises(exception.InvalidParameterValue, self._driver.__init__) def test_init_invalid_credentials(self): self.mock_object(manila.share.driver.ShareDriver, '__init__') CONF.hitachi_hnas_password = None CONF.hitachi_hnas_ssh_private_key = None self.assertRaises(exception.InvalidParameterValue, self._driver.__init__) @ddt.data(True, False) def test_update_access_nfs(self, empty_rules): if not empty_rules: access1 = { 'access_type': 'ip', 'access_to': '172.24.10.10', 'access_level': 'rw' } access2 = { 'access_type': 'ip', 'access_to': '188.100.20.10', 'access_level': 'ro' } access_list = [access1, access2] access_list_updated = ( [access1['access_to'] + '(' + access1['access_level'] + ',norootsquash)', access2['access_to'] + '(' + access2['access_level'] + ')', ]) else: access_list = [] access_list_updated = [] self.mock_object(ssh.HNASSSHBackend, "update_nfs_access_rule", mock.Mock()) self._driver.update_access('context', share_nfs, access_list, [], []) ssh.HNASSSHBackend.update_nfs_access_rule.assert_called_once_with( access_list_updated, share_id=share_nfs['id']) self.assertTrue(self.mock_log.debug.called) def test_update_access_ip_exception(self): access1 = { 'access_type': 'ip', 'access_to': '188.100.20.10', 'access_level': 'ro' } access2 = { 'access_type': 'something', 'access_to': '172.24.10.10', 'access_level': 'rw' } access_list = [access1, access2] self.assertRaises(exception.InvalidShareAccess, self._driver.update_access, 'context', share_nfs, access_list, [], []) def test_update_access_not_found_exception(self): access1 = { 'access_type': 'ip', 'access_to': '188.100.20.10', 'access_level': 'ro' } access2 = { 'access_type': 'something', 'access_to': '172.24.10.10', 'access_level': 'rw' } access_list = [access1, access2] self.mock_object(self._driver, '_ensure_share', mock.Mock( side_effect=exception.HNASItemNotFoundException(msg='fake'))) self.assertRaises(exception.ShareResourceNotFound, self._driver.update_access, 'context', share_nfs, access_list, add_rules=[], delete_rules=[]) @ddt.data([access_cifs_rw, 'acr'], [access_cifs_ro, 'ar']) @ddt.unpack def test_allow_access_cifs(self, access_cifs, permission): access_list_allow = [access_cifs] self.mock_object(ssh.HNASSSHBackend, 'cifs_allow_access') self._driver.update_access('context', share_cifs, [], access_list_allow, []) ssh.HNASSSHBackend.cifs_allow_access.assert_called_once_with( 
share_cifs['id'], 'fake_user', permission, is_snapshot=False) self.assertTrue(self.mock_log.debug.called) def test_allow_access_cifs_invalid_type(self): access_cifs_type_ip = { 'id': '43167594-40e9-b899-1f4f-b9c2176b7564', 'access_type': 'ip', 'access_to': 'fake_user', 'access_level': 'rw', 'state': 'active', } access_list_allow = [access_cifs_type_ip] self.assertRaises(exception.InvalidShareAccess, self._driver.update_access, 'context', share_cifs, [], access_list_allow, []) def test_deny_access_cifs(self): access_list_deny = [access_cifs_rw] self.mock_object(ssh.HNASSSHBackend, 'cifs_deny_access') self._driver.update_access('context', share_cifs, [], [], access_list_deny) ssh.HNASSSHBackend.cifs_deny_access.assert_called_once_with( share_cifs['id'], 'fake_user', is_snapshot=False) self.assertTrue(self.mock_log.debug.called) def test_deny_access_cifs_unsupported_type(self): access_cifs_type_ip = { 'id': '43167594-40e9-b899-1f4f-b9c2176b7564', 'access_type': 'ip', 'access_to': 'fake_user', 'access_level': 'rw', 'state': 'active', } access_list_deny = [access_cifs_type_ip] self.mock_object(ssh.HNASSSHBackend, 'cifs_deny_access') self._driver.update_access('context', share_cifs, [], [], access_list_deny) self.assertTrue(self.mock_log.warning.called) def test_update_access_invalid_share_protocol(self): self.mock_object(self._driver, '_ensure_share') ex = self.assertRaises(exception.ShareBackendException, self._driver.update_access, 'context', invalid_share, [], [], []) self.assertEqual(invalid_protocol_msg, ex.msg) def test_update_access_cifs_recovery_mode(self): access_list = [access_cifs_rw, access_cifs_ro] permission_list = [('fake_user1', 'acr'), ('fake_user2', 'ar')] self.mock_object(ssh.HNASSSHBackend, 'list_cifs_permissions', mock.Mock(return_value=permission_list)) self.mock_object(ssh.HNASSSHBackend, 'cifs_deny_access') self.mock_object(ssh.HNASSSHBackend, 'cifs_allow_access') self._driver.update_access('context', share_cifs, access_list, [], []) ssh.HNASSSHBackend.list_cifs_permissions.assert_called_once_with( share_cifs['id']) self.assertTrue(self.mock_log.debug.called) def _get_export(self, id, share_proto, ip, is_admin_only, is_snapshot=False): if share_proto.lower() == 'nfs': if is_snapshot: path = '/snapshots/' + id else: path = '/shares/' + id export = ':'.join((ip, path)) else: export = r'\\%s\%s' % (ip, id) return { "path": export, "is_admin_only": is_admin_only, "metadata": {}, } @ddt.data(share_nfs, share_cifs) def test_create_share(self, share): self.mock_object(driver.HitachiHNASDriver, "_check_fs_mounted", mock.Mock()) self.mock_object(ssh.HNASSSHBackend, "vvol_create") self.mock_object(ssh.HNASSSHBackend, "quota_add") self.mock_object(ssh.HNASSSHBackend, "nfs_export_add", mock.Mock( return_value='/shares/' + share['id'])) self.mock_object(ssh.HNASSSHBackend, "cifs_share_add") result = self._driver.create_share('context', share) self.assertTrue(self.mock_log.debug.called) ssh.HNASSSHBackend.vvol_create.assert_called_once_with(share['id']) ssh.HNASSSHBackend.quota_add.assert_called_once_with(share['id'], share['size']) expected = [ self._get_export( share['id'], share['share_proto'], self._driver.hnas_evs_ip, False), self._get_export( share['id'], share['share_proto'], self._driver.hnas_admin_network_ip, True)] if share['share_proto'].lower() == 'nfs': ssh.HNASSSHBackend.nfs_export_add.assert_called_once_with( share_nfs['id'], snapshot_id=None) self.assertFalse(ssh.HNASSSHBackend.cifs_share_add.called) else: ssh.HNASSSHBackend.cifs_share_add.assert_called_once_with( 
share_cifs['id'], snapshot_id=None) self.assertFalse(ssh.HNASSSHBackend.nfs_export_add.called) self.assertEqual(expected, result) def test_create_share_export_error(self): self.mock_object(driver.HitachiHNASDriver, "_check_fs_mounted", mock.Mock()) self.mock_object(ssh.HNASSSHBackend, "vvol_create") self.mock_object(ssh.HNASSSHBackend, "quota_add") self.mock_object(ssh.HNASSSHBackend, "nfs_export_add", mock.Mock( side_effect=exception.HNASBackendException('msg'))) self.mock_object(ssh.HNASSSHBackend, "vvol_delete") self.assertRaises(exception.HNASBackendException, self._driver.create_share, 'context', share_nfs) self.assertTrue(self.mock_log.debug.called) ssh.HNASSSHBackend.vvol_create.assert_called_once_with(share_nfs['id']) ssh.HNASSSHBackend.quota_add.assert_called_once_with(share_nfs['id'], share_nfs['size']) ssh.HNASSSHBackend.nfs_export_add.assert_called_once_with( share_nfs['id'], snapshot_id=None) ssh.HNASSSHBackend.vvol_delete.assert_called_once_with(share_nfs['id']) def test_create_share_invalid_share_protocol(self): self.mock_object(driver.HitachiHNASDriver, "_create_share", mock.Mock(return_value="path")) ex = self.assertRaises(exception.ShareBackendException, self._driver.create_share, 'context', invalid_share) self.assertEqual(invalid_protocol_msg, ex.msg) @ddt.data(share_nfs, share_cifs) def test_delete_share(self, share): self.mock_object(driver.HitachiHNASDriver, "_check_fs_mounted", mock.Mock()) self.mock_object(ssh.HNASSSHBackend, "nfs_export_del") self.mock_object(ssh.HNASSSHBackend, "cifs_share_del") self.mock_object(ssh.HNASSSHBackend, "vvol_delete") self._driver.delete_share('context', share) self.assertTrue(self.mock_log.debug.called) ssh.HNASSSHBackend.vvol_delete.assert_called_once_with(share['id']) if share['share_proto'].lower() == 'nfs': ssh.HNASSSHBackend.nfs_export_del.assert_called_once_with( share['id']) self.assertFalse(ssh.HNASSSHBackend.cifs_share_del.called) else: ssh.HNASSSHBackend.cifs_share_del.assert_called_once_with( share['id']) self.assertFalse(ssh.HNASSSHBackend.nfs_export_del.called) @ddt.data(snapshot_nfs, snapshot_cifs, snapshot_mount_support_nfs, snapshot_mount_support_cifs) def test_create_snapshot(self, snapshot): hnas_id = snapshot['share_id'] access_list = ['172.24.44.200(rw,norootsquash)', '172.24.49.180(all_squash,read_write,secure)', '172.24.49.110(ro, secure)', '172.24.49.112(secure,readwrite,norootsquash)', '172.24.49.142(read_only, secure)', '172.24.49.201(rw,read_write,readwrite)', '172.24.49.218(rw)'] ro_list = ['172.24.44.200(ro,norootsquash)', '172.24.49.180(all_squash,ro,secure)', '172.24.49.110(ro, secure)', '172.24.49.112(secure,ro,norootsquash)', '172.24.49.142(read_only, secure)', '172.24.49.201(ro,ro,ro)', '172.24.49.218(ro)'] export_locations = [ self._get_export( snapshot['id'], snapshot['share']['share_proto'], self._driver.hnas_evs_ip, False, is_snapshot=True), self._get_export( snapshot['id'], snapshot['share']['share_proto'], self._driver.hnas_admin_network_ip, True, is_snapshot=True)] expected = {'provider_location': '/snapshots/' + hnas_id + '/' + snapshot['id']} if snapshot['share'].get('mount_snapshot_support'): expected['export_locations'] = export_locations self.mock_object(ssh.HNASSSHBackend, "get_nfs_host_list", mock.Mock( return_value=access_list)) self.mock_object(ssh.HNASSSHBackend, "update_nfs_access_rule", mock.Mock()) self.mock_object(ssh.HNASSSHBackend, "is_cifs_in_use", mock.Mock( return_value=False)) self.mock_object(ssh.HNASSSHBackend, "tree_clone") self.mock_object(ssh.HNASSSHBackend, 
"nfs_export_add") self.mock_object(ssh.HNASSSHBackend, "cifs_share_add") out = self._driver.create_snapshot('context', snapshot) ssh.HNASSSHBackend.tree_clone.assert_called_once_with( '/shares/' + hnas_id, '/snapshots/' + hnas_id + '/' + snapshot['id']) self.assertEqual(expected, out) if snapshot['share']['share_proto'].lower() == 'nfs': ssh.HNASSSHBackend.get_nfs_host_list.assert_called_once_with( hnas_id) ssh.HNASSSHBackend.update_nfs_access_rule.assert_any_call( ro_list, share_id=hnas_id) ssh.HNASSSHBackend.update_nfs_access_rule.assert_any_call( access_list, share_id=hnas_id) else: ssh.HNASSSHBackend.is_cifs_in_use.assert_called_once_with( hnas_id) def test_create_snapshot_invalid_protocol(self): self.mock_object(self._driver, '_ensure_share') ex = self.assertRaises(exception.ShareBackendException, self._driver.create_snapshot, 'context', invalid_snapshot) self.assertEqual(invalid_protocol_msg, ex.msg) def test_create_snapshot_cifs_exception(self): cifs_excep_msg = ("Share backend error: CIFS snapshot when share is " "mounted is disabled. Set " "hitachi_hnas_allow_cifs_snapshot_while_mounted to " "True or unmount the share to take a snapshot.") self.mock_object(ssh.HNASSSHBackend, "is_cifs_in_use", mock.Mock( return_value=True)) ex = self.assertRaises(exception.ShareBackendException, self._driver.create_snapshot, 'context', snapshot_cifs) self.assertEqual(cifs_excep_msg, ex.msg) def test_create_snapshot_first_snapshot(self): hnas_id = snapshot_nfs['share_id'] self.mock_object(ssh.HNASSSHBackend, "get_nfs_host_list", mock.Mock( return_value=['172.24.44.200(rw)'])) self.mock_object(ssh.HNASSSHBackend, "update_nfs_access_rule", mock.Mock()) self.mock_object(ssh.HNASSSHBackend, "tree_clone", mock.Mock( side_effect=exception.HNASNothingToCloneException('msg'))) self.mock_object(ssh.HNASSSHBackend, "create_directory") self.mock_object(ssh.HNASSSHBackend, "nfs_export_add") self.mock_object(ssh.HNASSSHBackend, "cifs_share_add") self._driver.create_snapshot('context', snapshot_nfs) self.assertTrue(self.mock_log.warning.called) ssh.HNASSSHBackend.get_nfs_host_list.assert_called_once_with( hnas_id) ssh.HNASSSHBackend.update_nfs_access_rule.assert_any_call( ['172.24.44.200(ro)'], share_id=hnas_id) ssh.HNASSSHBackend.update_nfs_access_rule.assert_any_call( ['172.24.44.200(rw)'], share_id=hnas_id) ssh.HNASSSHBackend.create_directory.assert_called_once_with( '/snapshots/' + hnas_id + '/' + snapshot_nfs['id']) @ddt.data(snapshot_nfs, snapshot_cifs, snapshot_mount_support_nfs, snapshot_mount_support_cifs) def test_delete_snapshot(self, snapshot): hnas_share_id = snapshot['share_id'] hnas_snapshot_id = snapshot['id'] self.mock_object(driver.HitachiHNASDriver, "_check_fs_mounted") self.mock_object(ssh.HNASSSHBackend, "tree_delete") self.mock_object(ssh.HNASSSHBackend, "delete_directory") self.mock_object(ssh.HNASSSHBackend, "nfs_export_del") self.mock_object(ssh.HNASSSHBackend, "cifs_share_del") self._driver.delete_snapshot('context', snapshot) self.assertTrue(self.mock_log.debug.called) self.assertTrue(self.mock_log.info.called) driver.HitachiHNASDriver._check_fs_mounted.assert_called_once_with() ssh.HNASSSHBackend.tree_delete.assert_called_once_with( '/snapshots/' + hnas_share_id + '/' + snapshot['id']) ssh.HNASSSHBackend.delete_directory.assert_called_once_with( '/snapshots/' + hnas_share_id) if snapshot['share']['share_proto'].lower() == 'nfs': if snapshot['share'].get('mount_snapshot_support'): ssh.HNASSSHBackend.nfs_export_del.assert_called_once_with( snapshot_id=hnas_snapshot_id) else: 
                ssh.HNASSSHBackend.nfs_export_del.assert_not_called()
        else:
            if snapshot['share'].get('mount_snapshot_support'):
                ssh.HNASSSHBackend.cifs_share_del.assert_called_once_with(
                    hnas_snapshot_id)
            else:
                ssh.HNASSSHBackend.cifs_share_del.assert_not_called()

    def test_delete_managed_snapshot(self):
        hnas_id = manage_snapshot['share_id']

        self.mock_object(driver.HitachiHNASDriver, "_check_fs_mounted")
        self.mock_object(ssh.HNASSSHBackend, "tree_delete")
        self.mock_object(ssh.HNASSSHBackend, "delete_directory")
        self.mock_object(ssh.HNASSSHBackend, "nfs_export_del")
        self.mock_object(ssh.HNASSSHBackend, "cifs_share_del")

        self._driver.delete_snapshot('context', manage_snapshot)

        self.assertTrue(self.mock_log.debug.called)
        self.assertTrue(self.mock_log.info.called)
        driver.HitachiHNASDriver._check_fs_mounted.assert_called_once_with()
        ssh.HNASSSHBackend.tree_delete.assert_called_once_with(
            manage_snapshot['provider_location'])
        ssh.HNASSSHBackend.delete_directory.assert_called_once_with(
            '/snapshots/' + hnas_id)

    @ddt.data(share_nfs, share_cifs)
    def test_ensure_share(self, share):
        result = self._driver.ensure_share('context', share)

        ssh.HNASSSHBackend.check_vvol.assert_called_once_with(share['id'])
        ssh.HNASSSHBackend.check_quota.assert_called_once_with(share['id'])

        expected = [
            self._get_export(
                share['id'], share['share_proto'], self._driver.hnas_evs_ip,
                False),
            self._get_export(
                share['id'], share['share_proto'],
                self._driver.hnas_admin_network_ip, True)]

        if share['share_proto'].lower() == 'nfs':
            ssh.HNASSSHBackend.check_export.assert_called_once_with(
                share['id'])
            self.assertFalse(ssh.HNASSSHBackend.check_cifs.called)
        else:
            ssh.HNASSSHBackend.check_cifs.assert_called_once_with(share['id'])
            self.assertFalse(ssh.HNASSSHBackend.check_export.called)
        self.assertEqual(expected, result)

    def test_ensure_share_invalid_protocol(self):
        ex = self.assertRaises(exception.ShareBackendException,
                               self._driver.ensure_share, 'context',
                               invalid_share)

        self.assertEqual(invalid_protocol_msg, ex.msg)

    def test_shrink_share(self):
        self.mock_object(ssh.HNASSSHBackend, "get_share_usage", mock.Mock(
            return_value=10))
        self.mock_object(ssh.HNASSSHBackend, "modify_quota")

        self._driver.shrink_share(share_nfs, 11)

        ssh.HNASSSHBackend.get_share_usage.assert_called_once_with(
            share_nfs['id'])
        ssh.HNASSSHBackend.modify_quota.assert_called_once_with(
            share_nfs['id'], 11)

    def test_shrink_share_new_size_lower_than_usage(self):
        self.mock_object(ssh.HNASSSHBackend, "get_share_usage", mock.Mock(
            return_value=10))

        self.assertRaises(exception.ShareShrinkingPossibleDataLoss,
                          self._driver.shrink_share, share_nfs, 9)
        ssh.HNASSSHBackend.get_share_usage.assert_called_once_with(
            share_nfs['id'])

    def test_extend_share(self):
        self.mock_object(ssh.HNASSSHBackend, "get_stats", mock.Mock(
            return_value=(500, 200, True)))
        self.mock_object(ssh.HNASSSHBackend, "modify_quota")

        self._driver.extend_share(share_nfs, 150)

        ssh.HNASSSHBackend.get_stats.assert_called_once_with()
        ssh.HNASSSHBackend.modify_quota.assert_called_once_with(
            share_nfs['id'], 150)

    def test_extend_share_with_no_available_space_in_fs(self):
        self.mock_object(ssh.HNASSSHBackend, "get_stats", mock.Mock(
            return_value=(500, 200, False)))
        self.mock_object(ssh.HNASSSHBackend, "modify_quota")

        self.assertRaises(exception.HNASBackendException,
                          self._driver.extend_share, share_nfs, 1000)
        ssh.HNASSSHBackend.get_stats.assert_called_once_with()

    @ddt.data(share_nfs, share_cifs)
    def test_manage_existing(self, share):
        expected_exports = [
            self._get_export(
                share['id'], share['share_proto'], self._driver.hnas_evs_ip,
                False),
            self._get_export(
                share['id'], share['share_proto'],
                self._driver.hnas_admin_network_ip, True)]

        expected_out = {'size': share['size'],
                        'export_locations': expected_exports}

        self.mock_object(ssh.HNASSSHBackend, "get_share_quota", mock.Mock(
            return_value=share['size']))

        out = self._driver.manage_existing(share, 'option')

        self.assertEqual(expected_out, out)
        ssh.HNASSSHBackend.get_share_quota.assert_called_once_with(
            share['id'])

    def test_manage_existing_no_quota(self):
        self.mock_object(ssh.HNASSSHBackend, "get_share_quota", mock.Mock(
            return_value=None))

        self.assertRaises(exception.ManageInvalidShare,
                          self._driver.manage_existing, share_nfs, 'option')
        ssh.HNASSSHBackend.get_share_quota.assert_called_once_with(
            share_nfs['id'])

    def test_manage_existing_wrong_share_id(self):
        self.mock_object(self.fake_private_storage, 'get',
                         mock.Mock(return_value='Wrong_share_id'))

        self.assertRaises(exception.HNASBackendException,
                          self._driver.manage_existing, share_nfs, 'option')

    @ddt.data(':/', '1.1.1.1:/share_id', '1.1.1.1:/shares',
              '1.1.1.1:shares/share_id', ':/share_id')
    def test_manage_existing_wrong_path_format_nfs(self, wrong_location):
        expected_exception = ("Share backend error: Incorrect path. It "
                              "should have the following format: "
                              "IP:/shares/share_id.")
        self._test_manage_existing_wrong_path(
            share_nfs.copy(), expected_exception, wrong_location)

    @ddt.data('\\\\1.1.1.1', '1.1.1.1\\share_id', '1.1.1.1\\shares\\share_id',
              '\\\\1.1.1.1\\shares\\share_id', '\\\\share_id')
    def test_manage_existing_wrong_path_format_cifs(self, wrong_location):
        expected_exception = ("Share backend error: Incorrect path. It should "
                              "have the following format: \\\\IP\\share_id.")
        self._test_manage_existing_wrong_path(
            share_cifs.copy(), expected_exception, wrong_location)

    def _test_manage_existing_wrong_path(
            self, share, expected_exception, wrong_location):
        share['export_locations'] = [{'path': wrong_location}]

        ex = self.assertRaises(exception.ShareBackendException,
                               self._driver.manage_existing, share, 'option')

        self.assertEqual(expected_exception, ex.msg)

    def test_manage_existing_wrong_evs_ip(self):
        share_nfs['export_locations'] = [{'path': '172.24.44.189:/shares/'
                                                  'aa4a7710-f326-41fb-ad18-'}]

        self.assertRaises(exception.ShareBackendException,
                          self._driver.manage_existing, share_nfs, 'option')

    def test_manage_existing_invalid_host(self):
        self.assertRaises(exception.ShareBackendException,
                          self._driver.manage_existing, share_invalid_host,
                          'option')

    def test_manage_existing_invalid_protocol(self):
        self.assertRaises(exception.ShareBackendException,
                          self._driver.manage_existing, invalid_share,
                          'option')

    @ddt.data(True, False)
    def test_unmanage(self, has_export_locations):
        share_copy = share_nfs.copy()
        if not has_export_locations:
            share_copy['export_locations'] = []

        self._driver.unmanage(share_copy)

        self.assertTrue(self.fake_private_storage.delete.called)
        self.assertTrue(self.mock_log.info.called)

    def test_get_network_allocations_number(self):
        result = self._driver.get_network_allocations_number()

        self.assertEqual(0, result)

    @ddt.data([share_nfs, snapshot_nfs], [share_cifs, snapshot_cifs])
    @ddt.unpack
    def test_create_share_from_snapshot(self, share, snapshot):
        self.mock_object(driver.HitachiHNASDriver, "_check_fs_mounted",
                         mock.Mock())
        self.mock_object(ssh.HNASSSHBackend, "vvol_create")
        self.mock_object(ssh.HNASSSHBackend, "quota_add")
        self.mock_object(ssh.HNASSSHBackend, "tree_clone")
        self.mock_object(ssh.HNASSSHBackend, "cifs_share_add")
        self.mock_object(ssh.HNASSSHBackend, "nfs_export_add")

        result = self._driver.create_share_from_snapshot('context',
                                                         share, snapshot)

        ssh.HNASSSHBackend.vvol_create.assert_called_once_with(share['id'])
        ssh.HNASSSHBackend.quota_add.assert_called_once_with(share['id'],
                                                             share['size'])
        ssh.HNASSSHBackend.tree_clone.assert_called_once_with(
            '/snapshots/' + share['id'] + '/' + snapshot['id'],
            '/shares/' + share['id'])

        expected = [
            self._get_export(
                share['id'], share['share_proto'], self._driver.hnas_evs_ip,
                False),
            self._get_export(
                share['id'], share['share_proto'],
                self._driver.hnas_admin_network_ip, True)]

        if share['share_proto'].lower() == 'nfs':
            ssh.HNASSSHBackend.nfs_export_add.assert_called_once_with(
                share['id'])
            self.assertFalse(ssh.HNASSSHBackend.cifs_share_add.called)
        else:
            ssh.HNASSSHBackend.cifs_share_add.assert_called_once_with(
                share['id'])
            self.assertFalse(ssh.HNASSSHBackend.nfs_export_add.called)
        self.assertEqual(expected, result)

    def test_create_share_from_snapshot_empty_snapshot(self):
        self.mock_object(driver.HitachiHNASDriver, "_check_fs_mounted",
                         mock.Mock())
        self.mock_object(ssh.HNASSSHBackend, "vvol_create")
        self.mock_object(ssh.HNASSSHBackend, "quota_add")
        self.mock_object(ssh.HNASSSHBackend, "tree_clone", mock.Mock(
            side_effect=exception.HNASNothingToCloneException('msg')))
        self.mock_object(ssh.HNASSSHBackend, "nfs_export_add")

        result = self._driver.create_share_from_snapshot('context', share_nfs,
                                                         snapshot_nfs)

        expected = [
            self._get_export(
                share_nfs['id'], share_nfs['share_proto'],
                self._driver.hnas_evs_ip, False),
            self._get_export(
                share_nfs['id'], share_nfs['share_proto'],
                self._driver.hnas_admin_network_ip, True)]

        self.assertEqual(expected, result)
        self.assertTrue(self.mock_log.warning.called)
        ssh.HNASSSHBackend.vvol_create.assert_called_once_with(share_nfs['id'])
        ssh.HNASSSHBackend.quota_add.assert_called_once_with(
            share_nfs['id'], share_nfs['size'])
        ssh.HNASSSHBackend.tree_clone.assert_called_once_with(
            '/snapshots/' + share_nfs['id'] + '/' + snapshot_nfs['id'],
            '/shares/' + share_nfs['id'])
        ssh.HNASSSHBackend.nfs_export_add.assert_called_once_with(
            share_nfs['id'])

    def test_create_share_from_snapshot_invalid_protocol(self):
        self.mock_object(driver.HitachiHNASDriver, "_check_fs_mounted",
                         mock.Mock())
        self.mock_object(ssh.HNASSSHBackend, "vvol_create")
        self.mock_object(ssh.HNASSSHBackend, "quota_add")
        self.mock_object(ssh.HNASSSHBackend, "tree_clone")

        ex = self.assertRaises(exception.ShareBackendException,
                               self._driver.create_share_from_snapshot,
                               'context', invalid_share, snapshot_nfs)

        self.assertEqual(invalid_protocol_msg, ex.msg)

    def test_create_share_from_snapshot_cleanup(self):
        dest_path = '/snapshots/' + share_nfs['id'] + '/' + snapshot_nfs['id']
        src_path = '/shares/' + share_nfs['id']

        self.mock_object(driver.HitachiHNASDriver, "_check_fs_mounted",
                         mock.Mock())
        self.mock_object(ssh.HNASSSHBackend, "vvol_create")
        self.mock_object(ssh.HNASSSHBackend, "quota_add")
        self.mock_object(ssh.HNASSSHBackend, "tree_clone")
        self.mock_object(ssh.HNASSSHBackend, "vvol_delete")
        self.mock_object(ssh.HNASSSHBackend, "nfs_export_add", mock.Mock(
            side_effect=exception.HNASBackendException(
                msg='Error adding nfs export.')))

        self.assertRaises(exception.HNASBackendException,
                          self._driver.create_share_from_snapshot, 'context',
                          share_nfs, snapshot_nfs)

        ssh.HNASSSHBackend.vvol_create.assert_called_once_with(
            share_nfs['id'])
        ssh.HNASSSHBackend.quota_add.assert_called_once_with(
            share_nfs['id'], share_nfs['size'])
        ssh.HNASSSHBackend.tree_clone.assert_called_once_with(
            dest_path, src_path)
        ssh.HNASSSHBackend.nfs_export_add.assert_called_once_with(
            share_nfs['id'])
        ssh.HNASSSHBackend.vvol_delete.assert_called_once_with(
            share_nfs['id'])

    def test__check_fs_mounted(self):
        self._driver._check_fs_mounted()

        ssh.HNASSSHBackend.check_fs_mounted.assert_called_once_with()

    def test__check_fs_mounted_not_mounted(self):
        self.mock_object(ssh.HNASSSHBackend, 'check_fs_mounted', mock.Mock(
            return_value=False))

        self.assertRaises(exception.HNASBackendException,
                          self._driver._check_fs_mounted)
        ssh.HNASSSHBackend.check_fs_mounted.assert_called_once_with()

    def test__update_share_stats(self):
        fake_data = {
            'share_backend_name': self._driver.backend_name,
            'driver_handles_share_servers':
                self._driver.driver_handles_share_servers,
            'vendor_name': 'Hitachi',
            'driver_version': '4.0.0',
            'storage_protocol': 'NFS_CIFS',
            'total_capacity_gb': 1000,
            'free_capacity_gb': 200,
            'reserved_percentage': driver.CONF.reserved_share_percentage,
            'qos': False,
            'thin_provisioning': True,
            'dedupe': True,
            'revert_to_snapshot_support': True,
            'mount_snapshot_support': True,
        }

        self.mock_object(ssh.HNASSSHBackend, 'get_stats', mock.Mock(
            return_value=(1000, 200, True)))
        self.mock_object(driver.HitachiHNASDriver, "_check_fs_mounted",
                         mock.Mock())
        self.mock_object(manila.share.driver.ShareDriver,
                         '_update_share_stats')

        self._driver._update_share_stats()

        self.assertTrue(self._driver.hnas.get_stats.called)
        (manila.share.driver.ShareDriver._update_share_stats.
            assert_called_once_with(fake_data))
        self.assertTrue(self.mock_log.info.called)

    @ddt.data(snapshot_nfs, snapshot_cifs, snapshot_mount_support_nfs,
              snapshot_mount_support_cifs)
    def test_ensure_snapshot(self, snapshot):
        result = self._driver.ensure_snapshot('context', snapshot)

        if snapshot['share'].get('mount_snapshot_support'):
            expected = [
                self._get_export(
                    snapshot['id'], snapshot['share']['share_proto'],
                    self._driver.hnas_evs_ip, False, is_snapshot=True),
                self._get_export(
                    snapshot['id'], snapshot['share']['share_proto'],
                    self._driver.hnas_admin_network_ip, True,
                    is_snapshot=True)]
            if snapshot['share']['share_proto'].lower() == 'nfs':
                ssh.HNASSSHBackend.check_export.assert_called_once_with(
                    snapshot['id'], is_snapshot=True)
                self.assertFalse(ssh.HNASSSHBackend.check_cifs.called)
            else:
                ssh.HNASSSHBackend.check_cifs.assert_called_once_with(
                    snapshot['id'])
                self.assertFalse(ssh.HNASSSHBackend.check_export.called)
        else:
            expected = None
        ssh.HNASSSHBackend.check_directory.assert_called_once_with(
            snapshot['provider_location'])
        self.assertEqual(expected, result)

    def test_manage_existing_snapshot(self):
        self.mock_object(ssh.HNASSSHBackend, 'check_directory',
                         mock.Mock(return_value=True))
        self.mock_object(self._driver, '_ensure_snapshot',
                         mock.Mock(return_value=[]))

        path_info = manage_snapshot['provider_location'].split('/')
        hnas_snapshot_id = path_info[3]

        out = self._driver.manage_existing_snapshot(manage_snapshot,
                                                    {'size': 20})

        ssh.HNASSSHBackend.check_directory.assert_called_with(
            '/snapshots/aa4a7710-f326-41fb-ad18-b4ad587fc87a'
            '/snapshot18-05-2106')
        self._driver._ensure_snapshot.assert_called_with(
            manage_snapshot, hnas_snapshot_id)
        self.assertEqual(20, out['size'])
        self.assertTrue(self.mock_log.debug.called)
        self.assertTrue(self.mock_log.info.called)

    @ddt.data(None, exception.HNASItemNotFoundException('Fake error.'))
    def test_manage_existing_snapshot_with_mount_support(self, exc):
        export_locations = [{
            'path': '172.24.44.10:/snapshots/'
                    '3377b015-a695-4a5a-8aa5-9b931b023380'}]
        self.mock_object(ssh.HNASSSHBackend, 'check_directory',
                         mock.Mock(return_value=True))
        self.mock_object(self._driver, '_ensure_snapshot',
                         mock.Mock(return_value=[], side_effect=exc))
        self.mock_object(self._driver, '_get_export_locations',
                         mock.Mock(return_value=export_locations))
        if exc:
            self.mock_object(self._driver, '_create_export')

        path_info = snapshot_mount_support_nfs['provider_location'].split('/')
        hnas_snapshot_id = path_info[3]

        out = self._driver.manage_existing_snapshot(
            snapshot_mount_support_nfs,
            {'size': 20, 'export_locations': export_locations})

        ssh.HNASSSHBackend.check_directory.assert_called_with(
            '/snapshots/62125744-fcdd-4f55-a8c1-d1498102f634'
            '/3377b015-a695-4a5a-8aa5-9b931b023380')
        self._driver._ensure_snapshot.assert_called_with(
            snapshot_mount_support_nfs, hnas_snapshot_id)
        self._driver._get_export_locations.assert_called_with(
            snapshot_mount_support_nfs['share']['share_proto'],
            hnas_snapshot_id, is_snapshot=True)
        if exc:
            self._driver._create_export.assert_called_with(
                snapshot_mount_support_nfs['share_id'],
                snapshot_mount_support_nfs['share']['share_proto'],
                snapshot_id=hnas_snapshot_id)
        self.assertEqual(20, out['size'])
        self.assertEqual(export_locations, out['export_locations'])
        self.assertTrue(self.mock_log.debug.called)
        self.assertTrue(self.mock_log.info.called)

    @ddt.data('fake_size', '128GB', '512 GB', {'size': 128})
    def test_manage_snapshot_invalid_size_exception(self, size):
        self.assertRaises(exception.ManageInvalidShareSnapshot,
                          self._driver.manage_existing_snapshot,
                          manage_snapshot, {'size': size})

    def test_manage_snapshot_size_not_provided_exception(self):
        self.assertRaises(exception.ManageInvalidShareSnapshot,
                          self._driver.manage_existing_snapshot,
                          manage_snapshot, {})

    @ddt.data('/root/snapshot_id', '/snapshots/share1/snapshot_id',
              '/directory1', 'snapshots/share1/snapshot_id')
    def test_manage_snapshot_invalid_path_exception(self, path):
        snap_copy = manage_snapshot.copy()
        snap_copy['provider_location'] = path

        self.assertRaises(exception.ManageInvalidShareSnapshot,
                          self._driver.manage_existing_snapshot,
                          snap_copy, {'size': 20})
        self.assertTrue(self.mock_log.debug.called)

    def test_manage_inexistent_snapshot_exception(self):
        self.mock_object(ssh.HNASSSHBackend, 'check_directory',
                         mock.Mock(return_value=False))

        self.assertRaises(exception.ManageInvalidShareSnapshot,
                          self._driver.manage_existing_snapshot,
                          manage_snapshot, {'size': 20})
        self.assertTrue(self.mock_log.debug.called)

    def test_unmanage_snapshot(self):
        self._driver.unmanage_snapshot(snapshot_nfs)

        self.assertTrue(self.mock_log.info.called)

    @ddt.data({'snap': snapshot_nfs, 'exc': None},
              {'snap': snapshot_cifs, 'exc': None},
              {'snap': snapshot_nfs,
               'exc': exception.HNASNothingToCloneException('fake')},
              {'snap': snapshot_cifs,
               'exc': exception.HNASNothingToCloneException('fake')})
    @ddt.unpack
    def test_revert_to_snapshot(self, exc, snap):
        self.mock_object(driver.HitachiHNASDriver, "_check_fs_mounted")
        self.mock_object(ssh.HNASSSHBackend, 'tree_delete')
        self.mock_object(ssh.HNASSSHBackend, 'vvol_create')
        self.mock_object(ssh.HNASSSHBackend, 'quota_add')
        self.mock_object(ssh.HNASSSHBackend, 'tree_clone',
                         mock.Mock(side_effect=exc))

        self._driver.revert_to_snapshot('context', snap, None, None)

        driver.HitachiHNASDriver._check_fs_mounted.assert_called_once_with()
        ssh.HNASSSHBackend.tree_delete.assert_called_once_with(
            '/'.join(('/shares', snap['share_id'])))
        ssh.HNASSSHBackend.vvol_create.assert_called_once_with(
            snap['share_id'])
        ssh.HNASSSHBackend.quota_add.assert_called_once_with(
            snap['share_id'], 2)
        ssh.HNASSSHBackend.tree_clone.assert_called_once_with(
            '/'.join(('/snapshots', snap['share_id'], snap['id'])),
            '/'.join(('/shares', snap['share_id'])))
        ssh.HNASSSHBackend.check_directory.assert_called_once_with(
            snap['provider_location'])
        if exc:
            self.assertTrue(self.mock_log.warning.called)
        self.assertTrue(self.mock_log.info.called)

    def test_nfs_snapshot_update_access_allow(self):
        access1 = {
            'access_type': 'ip',
            'access_to': '172.24.10.10',
        }
        access2 = {
            'access_type': 'ip',
            'access_to': '172.31.20.20',
        }
        access_list = [access1, access2]
        self.mock_object(ssh.HNASSSHBackend, "update_nfs_access_rule")

        self._driver.snapshot_update_access('ctxt', snapshot_nfs, access_list,
                                            access_list, [])

        ssh.HNASSSHBackend.update_nfs_access_rule.assert_called_once_with(
            [access1['access_to'] + '(ro)', access2['access_to'] + '(ro)'],
            snapshot_id=snapshot_nfs['id'])
        ssh.HNASSSHBackend.check_directory.assert_called_once_with(
            snapshot_nfs['provider_location'])
        self.assertTrue(self.mock_log.debug.called)

    def test_nfs_snapshot_update_access_deny(self):
        access1 = {
            'access_type': 'ip',
            'access_to': '172.24.10.10',
        }
        self.mock_object(ssh.HNASSSHBackend, "update_nfs_access_rule")

        self._driver.snapshot_update_access('ctxt', snapshot_nfs, [], [],
                                            [access1])

        ssh.HNASSSHBackend.update_nfs_access_rule.assert_called_once_with(
            [], snapshot_id=snapshot_nfs['id'])
        ssh.HNASSSHBackend.check_directory.assert_called_once_with(
            snapshot_nfs['provider_location'])
        self.assertTrue(self.mock_log.debug.called)

    def test_nfs_snapshot_update_access_invalid_access_type(self):
        access1 = {
            'access_type': 'user',
            'access_to': 'user1',
        }

        self.assertRaises(exception.InvalidSnapshotAccess,
                          self._driver.snapshot_update_access, 'ctxt',
                          snapshot_nfs, [access1], [], [])
        ssh.HNASSSHBackend.check_directory.assert_called_once_with(
            snapshot_nfs['provider_location'])

    def test_cifs_snapshot_update_access_allow(self):
        access1 = {
            'access_type': 'user',
            'access_to': 'fake_user1',
        }
        self.mock_object(ssh.HNASSSHBackend, 'cifs_allow_access')

        self._driver.snapshot_update_access('ctxt', snapshot_cifs, [access1],
                                            [access1], [])

        ssh.HNASSSHBackend.cifs_allow_access.assert_called_with(
            snapshot_cifs['id'], access1['access_to'], 'ar',
            is_snapshot=True)
        ssh.HNASSSHBackend.check_directory.assert_called_once_with(
            snapshot_cifs['provider_location'])
        self.assertTrue(self.mock_log.debug.called)

    def test_cifs_snapshot_update_access_deny(self):
        access1 = {
            'access_type': 'user',
            'access_to': 'fake_user1',
        }
        self.mock_object(ssh.HNASSSHBackend, 'cifs_deny_access')

        self._driver.snapshot_update_access('ctxt', snapshot_cifs, [], [],
                                            [access1])

        ssh.HNASSSHBackend.cifs_deny_access.assert_called_with(
            snapshot_cifs['id'], access1['access_to'], is_snapshot=True)
        ssh.HNASSSHBackend.check_directory.assert_called_once_with(
            snapshot_cifs['provider_location'])
        self.assertTrue(self.mock_log.debug.called)

    def test_cifs_snapshot_update_access_recovery_mode(self):
        access1 = {
            'access_type': 'user',
            'access_to': 'fake_user1',
        }
        access2 = {
            'access_type': 'user',
            'access_to': 'HDS\\fake_user2',
        }
        access_list = [access1, access2]
        permission_list = [('fake_user1', 'ar'), ('HDS\\fake_user2', 'ar')]
        formatted_user = r'"\{1}{0}\{1}"'.format(access2['access_to'], '"')
        self.mock_object(ssh.HNASSSHBackend, 'list_cifs_permissions',
                         mock.Mock(return_value=permission_list))
        self.mock_object(ssh.HNASSSHBackend, 'cifs_deny_access')
        self.mock_object(ssh.HNASSSHBackend, 'cifs_allow_access')

        self._driver.snapshot_update_access('ctxt', snapshot_cifs,
                                            access_list, [], [])

        ssh.HNASSSHBackend.list_cifs_permissions.assert_called_once_with(
            snapshot_cifs['id'])
        ssh.HNASSSHBackend.cifs_deny_access.assert_called_with(
            snapshot_cifs['id'], formatted_user, is_snapshot=True)
        ssh.HNASSSHBackend.cifs_allow_access.assert_called_with(
            snapshot_cifs['id'],
            access2['access_to'].replace('\\', '\\\\'), 'ar',
            is_snapshot=True)
        ssh.HNASSSHBackend.check_directory.assert_called_once_with(
            snapshot_cifs['provider_location'])
        self.assertTrue(self.mock_log.debug.called)
42.236078
79
0.624705
5,941
53,851
5.31594
0.068844
0.097429
0.050092
0.04683
0.7895
0.739345
0.690868
0.650244
0.596986
0.53863
0
0.041974
0.259847
53,851
1,274
80
42.269231
0.750389
0.011977
0
0.528015
0
0
0.175578
0.06317
0
0
0
0
0.169041
1
0.058879
false
0.001899
0.008547
0
0.069326
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
931acb0aff55dcc9d21d89e7ab39172fc95002c7
2,978
py
Python
sloth/test/dummy_data.py
maurov/xraysloth
6f18ddcb02050431574693d46bcf4b89c719c40b
[ "BSD-3-Clause" ]
4
2015-07-03T09:38:58.000Z
2022-03-16T11:09:49.000Z
sloth/test/dummy_data.py
maurov/xraysloth
6f18ddcb02050431574693d46bcf4b89c719c40b
[ "BSD-3-Clause" ]
null
null
null
sloth/test/dummy_data.py
maurov/xraysloth
6f18ddcb02050431574693d46bcf4b89c719c40b
[ "BSD-3-Clause" ]
2
2017-05-22T17:14:11.000Z
2017-07-04T04:52:08.000Z
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Generate dummy data for tests/examples
"""
import numpy as np


def dummy_gauss_image(x=None, y=None, xhalfrng=1.5, yhalfrng=None, xcen=0.5,
                      ycen=0.9, xnpts=1024, ynpts=None, xsigma=0.55,
                      ysigma=0.25, noise=0.3):
    """Create a dummy 2D Gaussian image with noise

    Parameters
    ----------
    x, y : 1D arrays (optional)
        arrays where to generate the image [None -> generated]
    xhalfrng : float (optional)
        half range of the X axis [1.5]
    yhalfrng : float or None (optional)
        half range of the Y axis [None -> xhalfrng]
    xcen : float (optional)
        X center [0.5]
    ycen : float (optional)
        Y center [0.9]
    xnpts : int (optional)
        number of points X [1024]
    ynpts : int or None (optional)
        number of points Y [None -> xnpts]
    xsigma : float (optional)
        sigma X [0.55]
    ysigma : float (optional)
        sigma Y [0.25]
    noise : float (optional)
        random noise level between 0 and 1 [0.3]

    Returns
    -------
    x, y : 1D arrays
    signal : 2D array
    """
    if yhalfrng is None:
        yhalfrng = xhalfrng
    if ycen is None:
        ycen = xcen
    if ynpts is None:
        ynpts = xnpts
    if x is None:
        x = np.linspace(xcen-xhalfrng, xcen+xhalfrng, xnpts)
    if y is None:
        y = np.linspace(ycen-yhalfrng, ycen+yhalfrng, ynpts)
    xx, yy = np.meshgrid(x, y)
    signal = np.exp(-((xx-xcen)**2 / (2*xsigma**2) +
                      ((yy-ycen)**2 / (2*ysigma**2))))
    # add noise
    signal += noise * np.random.random(size=signal.shape)
    return x, y, signal


def dummy_gauss_curve(xhalfrng=15, xcen=5, xnpts=512, xsigma=0.65, noise=0.3):
    """Create a dummy 1D Gaussian curve with noise

    Parameters
    ----------
    xhalfrng : float (optional)
        half range of the X axis [1.5]
    xcen : float (optional)
        X center [0.5]
    xnpts : int (optional)
        number of points X [1024]
    xsigma : float (optional)
        sigma X [0.55]
    noise : float (optional)
        random noise level between 0 and 1 [0.3]

    Returns
    -------
    x, signal : 1D arrays
    """
    x = np.linspace(xcen-xhalfrng, xcen+xhalfrng, xnpts)
    signal = np.exp(-((x-xcen)**2 / (2*xsigma**2)))
    # add noise
    signal += noise * np.random.random(size=signal.shape)
    return x, signal


def main():
    """Show two plot windows with dummy data"""
    from silx import sx
    sx.enable_gui()
    from sloth.gui.plot.plot1D import Plot1D
    from sloth.gui.plot.plot2D import Plot2D
    p1 = Plot1D()
    p2 = Plot2D()
    x, y = dummy_gauss_curve()
    p1.addCurve(x, y, legend="test dummy Gaussian with noise")
    p1.show()
    x, y, signal = dummy_gauss_image()
    p2.addImage(signal, x=x, y=y, legend="test dummy image")
    p2.show()
    input("Press enter to close windows")


if __name__ == '__main__':
    main()
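The two generators above are plain NumPy functions, so they can be exercised without the silx/sloth GUI stack used in main(). A minimal usage sketch, assuming matplotlib is available (matplotlib is an assumption here; the generators themselves only need numpy):

# Minimal usage sketch; the sloth/silx GUI classes are not required.
import matplotlib.pyplot as plt
from sloth.test.dummy_data import dummy_gauss_curve, dummy_gauss_image

x, signal = dummy_gauss_curve(noise=0.1)       # 1D noisy Gaussian
plt.plot(x, signal)

x2, y2, image = dummy_gauss_image(xnpts=256)   # 2D noisy Gaussian image
plt.matshow(image)
plt.show()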
28.09434
78
0.576897
424
2,978
4.011792
0.261792
0.076426
0.029982
0.03351
0.382716
0.355085
0.332745
0.269253
0.18107
0.18107
0
0.043685
0.292814
2,978
105
79
28.361905
0.764008
0.416723
0
0.097561
1
0
0.053316
0
0
0
0
0
0
1
0.073171
false
0
0.097561
0
0.219512
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
931c579da4bce33b29c5c9ae99f9be7566e00afb
2,984
py
Python
tools/scruffy/checkers/orgs.py
paultag/pupa
137293925503496e15137540e049bf544e129971
[ "BSD-3-Clause" ]
null
null
null
tools/scruffy/checkers/orgs.py
paultag/pupa
137293925503496e15137540e049bf544e129971
[ "BSD-3-Clause" ]
null
null
null
tools/scruffy/checkers/orgs.py
paultag/pupa
137293925503496e15137540e049bf544e129971
[ "BSD-3-Clause" ]
null
null
null
from .. import Check
from .common import common_checks


def check(db):
    for org in db.organizations.find({"classification": "legislature"}):
        for check in common_checks(org, 'organization', 'organizations'):
            yield check

        jid = org.get('jurisdiction_id')
        if jid is None:
            yield Check(collection='organizations',
                        id=org['_id'],
                        tagname='org-has-no-jurisdiction',
                        severity='critical')
            continue

        jorgs = list(db.organizations.find({
            "classification": "legislature",
            "jurisdiction_id": org['jurisdiction_id']
        }))

        if len(jorgs) != 1 and len(set([x['chamber'] for x in jorgs])) != len(jorgs):
            yield Check(collection='organizations',
                        id=org['_id'],
                        tagname='jurisdiction_id-has-duped-orgs-by-chamber',
                        severity='critical',
                        data=[[x['chamber'], x['_id']] for x in jorgs
                              if x['chamber'] == org['chamber']])

        if org.get('parent_id'):
            yield Check(collection='organizations',
                        id=org['_id'],
                        tagname='jurisdiction-has-a-parent',
                        severity='important')

        if "/" not in jid:
            yield Check(collection='organizations',
                        id=org['_id'],
                        tagname='jurisdiction-has-no-slashes',
                        severity='grave')
        else:
            prefix, uid = jid.split("/", 1)
            uid, what = uid.rsplit("/", 1)

            if prefix != 'ocd-jurisdiction':
                yield Check(collection='organizations',
                            id=org['_id'],
                            tagname='org-has-bad-jurisdiction-id-prefix',
                            severity='critical')

            if ":" in what:
                yield Check(collection='organizations',
                            id=org['_id'],
                            tagname='org-has-malformed-jurisdiction-id-ender',
                            severity='critical')

            kvp = [f.split(":") for f in uid.split("/")]
            if any((len(x) != 2) for x in kvp):
                yield Check(collection='organizations',
                            id=org['_id'],
                            tagname='org-has-malformed-jurisdiction-id-path',
                            severity='critical')

    for org in db.organizations.find({"classification": "party"}):
        if 'jurisdiction_id' in org and org['jurisdiction_id']:
            yield Check(collection='organizations',
                        id=org['_id'],
                        tagname='party-has-jurisdiction-id',
                        severity='critical',
                        data={
                            "jurisdiction_id": org['jurisdiction_id']
                        })
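For reference, the slicing logic in the else-branch above assumes Open Civic Data jurisdiction ids. A hedged walk-through with an illustrative id (the id value is an assumption for demonstration, not taken from this repo):

# Illustrative OCD jurisdiction id; the value itself is an assumption.
jid = 'ocd-jurisdiction/country:us/state:ak/government'
prefix, uid = jid.split('/', 1)  # 'ocd-jurisdiction', 'country:us/state:ak/government'
uid, what = uid.rsplit('/', 1)   # 'country:us/state:ak', 'government'
kvp = [f.split(':') for f in uid.split('/')]
# kvp == [['country', 'us'], ['state', 'ak']]: every path segment has exactly
# two parts and the ender has no ':', so none of the malformed-id checks fire.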
40.324324
76
0.464142
268
2,984
5.093284
0.238806
0.123077
0.117216
0.193407
0.506227
0.420513
0.420513
0.36044
0.36044
0.279853
0
0.00227
0.409517
2,984
73
77
40.876712
0.772418
0
0
0.380952
0
0
0.23559
0.08445
0
0
0
0
0
1
0.015873
false
0
0.047619
0
0.063492
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
931f8afd4a5774ac2d04ce2cc9a73a4ae7812abb
645
py
Python
tests/test_unit/test_graph/test_tesserae.py
karljohanw/cortexpy
70dcce771136f98edb5250ad8abd2a46bda7f0a6
[ "Apache-2.0" ]
null
null
null
tests/test_unit/test_graph/test_tesserae.py
karljohanw/cortexpy
70dcce771136f98edb5250ad8abd2a46bda7f0a6
[ "Apache-2.0" ]
null
null
null
tests/test_unit/test_graph/test_tesserae.py
karljohanw/cortexpy
70dcce771136f98edb5250ad8abd2a46bda7f0a6
[ "Apache-2.0" ]
null
null
null
from cortexpy.tesserae import Tesserae


class TestTesserae:
    def test_mosaic_alignment_on_short_query_and_two_templates(self):
        # given
        query = "GTAGGCGAGATGACGCCAT"
        targets = ["GTAGGCGAGTCCCGTTTATA", "CCACAGAAGATGACGCCATT"]

        # when
        t = Tesserae()
        p = t.align(query, targets)

        # then
        assert len(p) == 3

        assert p[1][0] == 'template0'
        assert p[1][1] == 'GTAGGCG'
        assert p[1][2] == 0
        assert p[1][3] == 6

        assert p[2][0] == 'template1'
        assert p[2][1] == '       AGATGACGCCAT'
        assert p[2][2] == 7
        assert p[2][3] == 18
24.807692
69
0.541085
77
645
4.428571
0.519481
0.164223
0.093842
0
0
0
0
0
0
0
0
0.055046
0.324031
645
25
70
25.8
0.727064
0.023256
0
0
0
0
0.164537
0
0
0
0
0
0.5625
1
0.0625
false
0
0.0625
0
0.1875
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
1
932270b633249ca330baa6276f57f5c2b0d91f48
1,814
py
Python
deli_counter/http/mounts/root/routes/v1/validation_models/regions.py
sandwichcloud/deli-counter
e72ee3966f4457ffbd1d432b3516965075c7c86e
[ "MIT" ]
1
2017-10-19T09:25:29.000Z
2017-10-19T09:25:29.000Z
deli_counter/http/mounts/root/routes/v1/validation_models/regions.py
sandwichcloud/deli-counter
e72ee3966f4457ffbd1d432b3516965075c7c86e
[ "MIT" ]
null
null
null
deli_counter/http/mounts/root/routes/v1/validation_models/regions.py
sandwichcloud/deli-counter
e72ee3966f4457ffbd1d432b3516965075c7c86e
[ "MIT" ]
null
null
null
from schematics import Model
from schematics.types import IntType, UUIDType, StringType, BooleanType

from ingredients_db.models.region import RegionState, Region
from ingredients_http.schematics.types import ArrowType, EnumType


class RequestCreateRegion(Model):
    name = StringType(required=True, min_length=3)
    datacenter = StringType(required=True)
    image_datastore = StringType(required=True)
    image_folder = StringType()


class ParamsRegion(Model):
    region_id = UUIDType(required=True)


class ParamsListRegion(Model):
    name = StringType()
    limit = IntType(default=100, max_value=100, min_value=1)
    marker = UUIDType()


class RequestRegionSchedule(Model):
    schedulable = BooleanType(required=True)


class ResponseRegion(Model):
    id = UUIDType(required=True)
    name = StringType(required=True, min_length=3)
    datacenter = StringType(required=True)
    image_datastore = StringType(required=True)
    image_folder = StringType()
    schedulable = BooleanType(required=True)
    state = EnumType(RegionState, required=True)
    current_task_id = UUIDType()
    created_at = ArrowType(required=True)
    updated_at = ArrowType(required=True)

    @classmethod
    def from_database(cls, region: Region):
        region_model = cls()
        region_model.id = region.id
        region_model.name = region.name
        region_model.datacenter = region.datacenter
        region_model.image_datastore = region.image_datastore
        region_model.image_folder = region.image_folder
        region_model.schedulable = region.schedulable
        region_model.state = region.state
        region_model.current_task_id = region.current_task_id
        region_model.created_at = region.created_at
        region_model.updated_at = region.updated_at

        return region_model
31.275862
71
0.738699
206
1,814
6.31068
0.257282
0.12
0.101538
0.083077
0.192308
0.192308
0.192308
0.192308
0.192308
0.192308
0
0.006061
0.181367
1,814
57
72
31.824561
0.86936
0
0
0.190476
0
0
0
0
0
0
0
0
0
1
0.02381
false
0
0.095238
0
0.714286
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
932632d6c40f6be38b39dba6166b3aebd0e53a0b
257
py
Python
winremoteenum.py
simondotsh/WinRemoteEnum
f40cff76788709e54e7ed19157870dacfcaa0bc3
[ "MIT" ]
2
2021-11-25T14:15:12.000Z
2022-02-02T04:27:17.000Z
winremoteenum.py
simondotsh/WinRemoteEnum
f40cff76788709e54e7ed19157870dacfcaa0bc3
[ "MIT" ]
null
null
null
winremoteenum.py
simondotsh/WinRemoteEnum
f40cff76788709e54e7ed19157870dacfcaa0bc3
[ "MIT" ]
2
2021-11-03T18:11:40.000Z
2022-02-02T15:11:30.000Z
#!/usr/bin/env python3

from src.cli import Cli
from src.core import Orchestrator


def main():
    config, args = Cli.parse_and_validate()
    Orchestrator.launch_modules(config, args.modules, args.targets, args.audit)


if __name__ == '__main__':
    main()
23.363636
79
0.727626
36
257
4.888889
0.638889
0.079545
0
0
0
0
0
0
0
0
0
0.004587
0.151751
257
11
80
23.363636
0.802752
0.081712
0
0
0
0
0.033898
0
0
0
0
0
0
1
0.142857
true
0
0.285714
0
0.428571
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
1
93280c05a831eacbb39a47e2e89a3ee261d3a78f
1,696
py
Python
setup.py
easyScience/easyCore
5d16d5b27803277d0c44886f94dab599f764ae0b
[ "BSD-3-Clause" ]
2
2021-11-02T10:22:45.000Z
2022-02-18T23:41:19.000Z
setup.py
easyScience/easyCore
5d16d5b27803277d0c44886f94dab599f764ae0b
[ "BSD-3-Clause" ]
114
2020-06-30T08:52:27.000Z
2022-03-30T20:47:56.000Z
setup.py
easyScience/easyCore
5d16d5b27803277d0c44886f94dab599f764ae0b
[ "BSD-3-Clause" ]
1
2022-03-04T13:01:09.000Z
2022-03-04T13:01:09.000Z
# -*- coding: utf-8 -*-

# DO NOT EDIT THIS FILE!
# This file has been autogenerated by dephell <3
# https://github.com/dephell/dephell

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

import os.path

readme = ''
here = os.path.abspath(os.path.dirname(__file__))
readme_path = os.path.join(here, 'README.rst')
if os.path.exists(readme_path):
    with open(readme_path, 'rb') as stream:
        readme = stream.read().decode('utf8')

setup(
    long_description=readme,
    name='easyScienceCore',
    version='0.1.0',
    description='Generic logic for easyScience libraries',
    python_requires='==3.*,>=3.7.0',
    project_urls={"documentation": "https://github.com/easyScience/easyCore",
                  "homepage": "https://github.com/easyScience/easyCore"},
    author='Simon Ward',
    license='GPL-3.0',
    classifiers=['Development Status :: 3 - Alpha',
                 'Intended Audience :: Developers',
                 'Topic :: Scientific/Engineering :: Physics',
                 'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)',
                 'Programming Language :: Python :: 3 :: Only'],
    packages=['easyCore', 'easyCore.Datasets', 'easyCore.Elements',
              'easyCore.Elements.Basic', 'easyCore.Elements.HigherLevel',
              'easyCore.Fitting', 'easyCore.Objects', 'easyCore.Symmetry',
              'easyCore.Utils', 'easyCore.Utils.Hugger', 'easyCore.Utils.io'],
    package_dir={"": "."},
    package_data={"easyCore.Elements": ["*.json"],
                  "easyCore.Symmetry": ["*.json"]},
    install_requires=['asteval==0.*,>=0.9.23', 'bumps==0.*,>=0.8.0',
                      'dfo-ls==1.*,>=1.2.0', 'lmfit==1.*,>=1.0.0',
                      'numpy==1.*,>=1.19.0', 'pint==0.*,>=0.16.0',
                      'uncertainties==3.*,>=3.1.0', 'xarray==0.*,>=0.16.0'],
)
43.487179
266
0.661557
222
1,696
4.995496
0.554054
0.027051
0.037872
0.045086
0.059513
0
0
0
0
0
0
0.035111
0.126769
1,696
38
267
44.631579
0.713707
0.074292
0
0
1
0
0.533248
0.090793
0
0
0
0
0
1
0
false
0
0.153846
0
0.153846
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
9328e23e8d93546efc5261e03fba860e2483f556
1,505
py
Python
submit_scripts/mizuRoute/mizuRoute_wrapper.py
BrisClimate/flood-cascade
660c29275a87785153d0f107ed23104fcbcbddee
[ "MIT" ]
null
null
null
submit_scripts/mizuRoute/mizuRoute_wrapper.py
BrisClimate/flood-cascade
660c29275a87785153d0f107ed23104fcbcbddee
[ "MIT" ]
null
null
null
submit_scripts/mizuRoute/mizuRoute_wrapper.py
BrisClimate/flood-cascade
660c29275a87785153d0f107ed23104fcbcbddee
[ "MIT" ]
3
2020-11-08T16:01:47.000Z
2021-01-13T17:13:32.000Z
#!/cm/shared/languages/python-3.3.2/bin/python
# submit script for submission of mizuRoute simulations
# Peter Uhe Oct 29 2019
#
# call this script from 'run_mizuRoute_templated_mswep050calib.py' which creates a qsub job to submit to the HPC queue
# This script is actually called from 'call_pythonscript.sh' (which is needed to load modules before calling the script)

import os, glob, subprocess, sys, shutil, multiprocessing
import datetime


def call_subproc(cmd, logfile):
    subprocess.call(cmd, stdout=open(logfile, 'w'), stderr=subprocess.STDOUT)


# Print start time
print('Starting:', datetime.datetime.now())

# Get environment variables
control_files = os.environ['CONTROL_FLIST'].split(':')
logdir = os.environ['LOGDIR']
ncpus = int(os.environ['NCPUS'])
mizuexe = os.environ['MIZU_EXE']

print('running simulations', len(control_files))
print(os.environ['CONTROL_FLIST'])

pool = multiprocessing.Pool(processes=ncpus)

for control_file in control_files:
    # Todo, could add check if this simulation has already been run
    fname = os.path.basename(control_file)
    sim_name = fname[8:-4]
    logfile = os.path.join(logdir, sim_name + '.log')
    cmd = ['time', mizuexe, control_file]
    print('command', cmd)
    print('log', logfile)
    #ret = pool.apply_async(subprocess.call,cmd,{'stdout':open(logfile,'w') ,'stderr':subprocess.STDOUT})
    #subprocess.call(cmd,stdout=open(logfile,'w'),stderr=subprocess.STDOUT)
    ret = pool.apply_async(call_subproc, [cmd, logfile])

pool.close()
pool.join()

print('Finished:', datetime.datetime.now())
35
120
0.765449
220
1,505
5.15
0.504545
0.039718
0.045013
0.0609
0.150927
0.150927
0.150927
0.150927
0.150927
0.150927
0
0.010287
0.095681
1,505
42
121
35.833333
0.82219
0.419269
0
0
0
0
0.118192
0
0
0
0
0.02381
0
1
0.043478
false
0
0.086957
0
0.130435
0.26087
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
932fbbd3b2489556c428736a9418ed424b87b707
339
py
Python
datasets/__init__.py
radarsat1/latentspace
e651dc14ed7b4dd3f28577639a06b7160c5fca5e
[ "MIT" ]
null
null
null
datasets/__init__.py
radarsat1/latentspace
e651dc14ed7b4dd3f28577639a06b7160c5fca5e
[ "MIT" ]
null
null
null
datasets/__init__.py
radarsat1/latentspace
e651dc14ed7b4dd3f28577639a06b7160c5fca5e
[ "MIT" ]
null
null
null
__all__ = ['get_dataset']


def get_dataset(params):
    if params['name'] == 'multimodal_points':
        from datasets.multimodal_gaussian_2d import Dataset
        return Dataset(params)
    elif params['name'] == 'kicks':
        from datasets.kicks import Dataset
        return Dataset(params)

    # fail loudly on an unrecognized dataset name
    assert False, 'Unknown dataset'
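A hedged usage sketch for the dispatcher above; the params dict here is hypothetical apart from the 'name' key, which is the only key get_dataset itself inspects (remaining keys are passed through to the chosen Dataset constructor):

from datasets import get_dataset

params = {'name': 'kicks'}  # hypothetical parameter dict
dataset = get_dataset(params)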
28.25
59
0.675516
39
339
5.641026
0.538462
0.177273
0.172727
0.236364
0.290909
0
0
0
0
0
0
0.003802
0.224189
339
11
60
30.818182
0.8327
0
0
0.222222
0
0
0.16568
0
0
0
0
0
0.111111
1
0.111111
false
0
0.222222
0
0.555556
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
9333f46a3199aee3308e67cf5584433366413e92
761
py
Python
tests/test_net_sender_proxy.py
nicoddemus/aioworkers
4ab85064844dc28141833d1348989d8c891f3d7d
[ "Apache-2.0" ]
45
2017-04-26T23:50:30.000Z
2021-12-29T03:21:06.000Z
tests/test_net_sender_proxy.py
nicoddemus/aioworkers
4ab85064844dc28141833d1348989d8c891f3d7d
[ "Apache-2.0" ]
63
2017-08-01T10:35:45.000Z
2022-03-01T18:07:49.000Z
tests/test_net_sender_proxy.py
nicoddemus/aioworkers
4ab85064844dc28141833d1348989d8c891f3d7d
[ "Apache-2.0" ]
6
2017-10-19T08:21:23.000Z
2021-12-29T03:25:32.000Z
import pytest


@pytest.fixture
def config_yaml():
    return """
    local_sender:
        cls: aioworkers.net.sender.proxy.Facade
        queue: queue1
    queue1:
        cls: aioworkers.queue.base.Queue
    worker:
        cls: aioworkers.net.sender.proxy.Worker
        autorun: true
        input: queue1
        sender: remote_sender
    remote_sender:
        cls: aioworkers.net.sender.proxy.Facade
        queue: queue2
    queue2:
        cls: aioworkers.queue.base.Queue
    """


async def test_proxy_chains(context):
    await context.local_sender.send(
        to='example@example.com',
        subject='test',
        content='text',
        html='<b>text</b>',
    )
    msg = await context.queue2.get()
    assert msg['subject'] == 'test'
20.026316
47
0.603154
86
761
5.255814
0.465116
0.143805
0.106195
0.146018
0.373894
0.19469
0.19469
0.19469
0
0
0
0.010989
0.282523
761
37
48
20.567568
0.81685
0
0
0.137931
0
0
0.624179
0.204993
0
0
0
0
0.034483
1
0.034483
false
0
0.034483
0.034483
0.103448
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
93371a58fa9ad1c5d98cbf050ae2c7e71f6f8acc
1,074
py
Python
menu.py
dadiletta/Saber
515751e7e9f0f12d97fa07606883eac73a352044
[ "MIT", "BSD-3-Clause" ]
null
null
null
menu.py
dadiletta/Saber
515751e7e9f0f12d97fa07606883eac73a352044
[ "MIT", "BSD-3-Clause" ]
null
null
null
menu.py
dadiletta/Saber
515751e7e9f0f12d97fa07606883eac73a352044
[ "MIT", "BSD-3-Clause" ]
null
null
null
import Light

__author__ = 'adilettad'

print("---------------")
print("----Welcome----")
print("------to-------")
print("-----Saber-----")
print("---------------")

sab = Light.Saber()

while True:
    command = raw_input('Your command:')
    if command == "blink":
        sab.demoLED()
    elif command == "dist" or command == "range":
        sab.demoRange()
    elif command == "watch" or command == "cover":
        sab.coverCheck()
    elif command == "knob":
        sab.demoKnob()
    elif command == "lcd":
        sab.demoLCD()
    elif command == "temp":
        sab.demoTemp()
    elif command == "buzzer":
        sab.demoBuzzer()
    elif command == "button":
        sab.demoButton()
    elif command == "clear":
        sab.clear()
    elif command == "maker":
        sab.demoMaker()
    elif command == "menu":
        exampleMenu = ["One", "Two", "Three", "Four", "Five"]
        print(sab.formMenu(exampleMenu)+1)
    elif command == "nav":
        sab.rootMenu()
    elif command == "gui":
        sab.launchGUI()

## Need Ctrl+C protection on all methods
22.851064
61
0.529795
109
1,074
5.174312
0.541284
0.234043
0
0
0
0
0
0
0
0
0
0.001252
0.256052
1,074
46
62
23.347826
0.704631
0.034451
0
0.054054
0
0
0.176983
0
0
0
0
0
0
1
0
false
0
0.027027
0
0.027027
0.162162
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
9338957c2d0434f6f713ebe7005892553e33c34d
456
py
Python
packages/migrations/0004_auto_20210416_1013.py
dandeduck/package-tracking-web
f7cb3dffd6f7f6b7ced5b1106a049c79c192dfa5
[ "MIT" ]
1
2021-02-11T22:16:51.000Z
2021-02-11T22:16:51.000Z
packages/migrations/0004_auto_20210416_1013.py
dandeduck/package-tracking-web
f7cb3dffd6f7f6b7ced5b1106a049c79c192dfa5
[ "MIT" ]
54
2021-02-11T18:52:11.000Z
2021-06-13T13:45:01.000Z
packages/migrations/0004_auto_20210416_1013.py
dandeduck/package-tracking-web
f7cb3dffd6f7f6b7ced5b1106a049c79c192dfa5
[ "MIT" ]
null
null
null
# Generated by Django 2.2.12 on 2021-04-16 10:13

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('packages', '0003_auto_20210416_1007'),
    ]

    operations = [
        migrations.AlterField(
            model_name='address',
            name='street_number',
            field=models.PositiveSmallIntegerField(blank=True, default=0),
            preserve_default=False,
        ),
    ]
22.8
74
0.620614
47
456
5.893617
0.829787
0
0
0
0
0
0
0
0
0
0
0.099698
0.274123
456
19
75
24
0.73716
0.100877
0
0
1
0
0.125
0.056373
0
0
0
0
0
1
0
false
0
0.076923
0
0.307692
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
933e03200f3271fcfb99f615ab7c6f6d828aa8d2
3,150
py
Python
Excite.py
JohnDoe2576/MyPythonCodes
9d714bc9f9909af0ade4142439da518b30b7d51d
[ "MIT" ]
null
null
null
Excite.py
JohnDoe2576/MyPythonCodes
9d714bc9f9909af0ade4142439da518b30b7d51d
[ "MIT" ]
null
null
null
Excite.py
JohnDoe2576/MyPythonCodes
9d714bc9f9909af0ade4142439da518b30b7d51d
[ "MIT" ]
null
null
null
import numpy as np
import matplotlib.pyplot as plt


def aprbs(**parms):
    # Generate an Amplitude modulated Pseudo-Random Binary Sequence (APRBS)
    #
    # The Pseudo-Random Binary Sequence (PRBS) is extensively used as an
    # excitation signal for System Identification of linear systems. It is
    # characterized by randomly delayed shifts in amplitude between a
    # user-defined minimum and maximum. These delayed shifts are usually
    # range-bound, and are very helpful in capturing the system behaviour
    # close to the operating frequency of the system.
    #
    # A nonlinear system usually will have different behaviour at different
    # amplitudes and cannot be predicted with the principle of superposition.
    # Hence, the excitation signal also needs to be modified to accommodate
    # for capturing system behaviour at different amplitudes. The APRBS is
    # an extension of PRBS by introducing randomly delayed shifts to random
    # levels of range-bound amplitudes (rather than between a maximum and
    # minimum).
    #
    # Input parameters:
    #   n_samples: Number of required samples
    #   alpha: tuple of (min_amplitude, max_amplitude)
    #   tau: tuple of (min_delay, max_delay)

    # Extract signal parameters
    n_samples = parms['n_samples']   # Number of samples
    tau = parms['tau']               # Delay vector
    alpha = parms['alpha']           # Amplitude vector

    # Convert to usable parameters
    tau_min = tau[0]
    tau_max = tau[1]
    tau_range = tau_max - tau_min
    alpha_min = alpha[0]
    alpha_max = alpha[1]
    alpha_range = alpha_max - alpha_min

    # Initialize arrays
    tau_array = np.zeros((n_samples), dtype=int)
    alpha_array = np.zeros((n_samples))
    signal = np.zeros((n_samples))

    # Initialize counters
    sample_count = 0
    shift_count = 0

    while sample_count < n_samples:
        # Generate a random shift to perturb 'tau' and 'alpha'
        tau_shift = np.random.uniform(0.0, 1.0, 1)
        alpha_shift = np.random.uniform(0.0, 1.0, 1)

        # Introduce the random delay such that it is range-bound between
        # 'tau_min' and 'tau_max'
        tau_array[shift_count] = np.fix(tau_min +
                                        (tau_shift * tau_range)).astype(int)
        alpha_array[shift_count] = alpha_min + (alpha_shift * alpha_range)

        # Update counters
        sample_count += tau_array[shift_count]
        shift_count += 1

    tau_array[shift_count-1] -= (sample_count - n_samples)

    idx = 0
    for i in range(0, shift_count):
        idx_tmp = idx + np.arange(0, tau_array[i], 1, dtype=int)
        signal[idx_tmp] = alpha_array[i]
        idx = idx + tau_array[i]

    return signal


# Time parameters
t0 = 0.      # Start time
dt = 0.01    # Time step
t1 = 100.    # End time

# Time vector
t = np.arange(t0, t1, dt)

# Signal parameters
n_samples = len(t)
alpha = (-2.5, 2.5)
tau = tuple(np.array([dt, 1.])/dt)

u = aprbs(n_samples=n_samples, alpha=alpha, tau=tau)

plt.plot(t, u)
plt.show()
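One detail worth making explicit from the script above: the tau bounds are expressed in samples, which is why the driver code divides the delay bounds by dt before calling aprbs. A hedged sketch with illustrative values:

import numpy as np

# Delay bounds of 0.05-0.5 s at dt = 0.01 s become (5.0, 50.0) samples.
dt = 0.01
tau_samples = tuple(np.array([0.05, 0.5]) / dt)

u = aprbs(n_samples=2000, alpha=(0.0, 1.0), tau=tau_samples)
assert u.shape == (2000,)  # the last dwell is trimmed to land on n_samples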
35
94
0.634286
432
3,150
4.493056
0.319444
0.045337
0.030912
0.023184
0.046368
0.02576
0.02576
0.02576
0.02576
0
0
0.016829
0.283175
3,150
89
95
35.393258
0.842781
0.446667
0
0
1
0
0.01052
0
0
0
0
0
0
1
0.02439
false
0
0.04878
0
0.097561
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
9346f4ae3f51e0598235db78c109cd4747e2c15f
1,820
py
Python
wiki-parse/node.py
mvwicky/wiki-parse
1c19e1c771b9dc96cf77fdaa4009dbd97619cc72
[ "MIT" ]
null
null
null
wiki-parse/node.py
mvwicky/wiki-parse
1c19e1c771b9dc96cf77fdaa4009dbd97619cc72
[ "MIT" ]
null
null
null
wiki-parse/node.py
mvwicky/wiki-parse
1c19e1c771b9dc96cf77fdaa4009dbd97619cc72
[ "MIT" ]
null
null
null
import os
import random
import sys
import time
from typing import ClassVar, List
from urllib.parse import urlsplit

import attr
from bs4 import BeautifulSoup
import requests

# Epsilon value
EPS = sys.float_info.epsilon


def req(url, verbose=False):
    """Make a request, sleeping for a random period of time afterwards"""
    res = requests.get(url)  # Make a request
    slp_tm = (random.random() + EPS) * 2.5  # Calculate sleep time
    if verbose:
        print(slp_tm)
    time.sleep(slp_tm)
    return res


@attr.s(slots=True)
class WikiNode(object):
    """A Graph Node

    TODO: Change outpaths to be a list of integers, indices to a global list
    Maybe just change everything to indices"""
    wiki_url: ClassVar[str] = 'https://en.wikipedia.org'
    link: str = attr.ib(type=str)
    level: int = attr.ib(type=int)
    out_paths: List[str] = attr.ib(default=attr.Factory(list), type=list)

    @property
    def page_name(self) -> str:
        return os.path.split(urlsplit(self.link).path)[1]

    @staticmethod
    def wiki_links(tag) -> bool:
        href = tag.attrs.get('href')
        if href is None:
            return False
        if 'Main_Page' in href:
            return False
        return href.startswith('/wiki') and (':' not in href)

    @classmethod
    def with_links(cls, url):
        ret = cls(url)
        ret.get_links()
        return ret

    def find_links(self):
        links = set()
        res = req(self.link)
        if res.status_code != requests.codes.ok:
            return links
        soup = BeautifulSoup(res.content, 'lxml')
        for link in soup(self.wiki_links):
            links.add(''.join((self.wiki_url, link['href'])))
        links -= {self.link}
        return links

    def get_links(self):
        self.out_paths.extend(self.find_links())
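A hedged usage sketch for the node class above. Note that it issues a live HTTP request when run, and that 'level' (the crawl depth) must be supplied by the caller since the attribute has no default:

# Illustrative only; performs a real request to Wikipedia.
node = WikiNode(link='https://en.wikipedia.org/wiki/Graph_theory', level=0)
node.get_links()       # populates node.out_paths via find_links()
print(node.page_name)  # -> 'Graph_theory'
print(len(node.out_paths), 'outgoing /wiki links')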
26.376812
76
0.626374
255
1,820
4.4
0.45098
0.013369
0.02139
0
0
0
0
0
0
0
0
0.002976
0.261538
1,820
68
77
26.764706
0.831845
0.135165
0
0.078431
0
0
0.03284
0
0
0
0
0.014706
0
1
0.117647
false
0
0.176471
0.019608
0.54902
0.019608
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
934b9553c57c50df3dbe6b024a339fd09db698e0
2,395
py
Python
props/graph_representation/proposition.py
kshabahang/props
d3cc981f778185769b4dc2816aecaf66d21d0e91
[ "MIT" ]
null
null
null
props/graph_representation/proposition.py
kshabahang/props
d3cc981f778185769b4dc2816aecaf66d21d0e91
[ "MIT" ]
null
null
null
props/graph_representation/proposition.py
kshabahang/props
d3cc981f778185769b4dc2816aecaf66d21d0e91
[ "MIT" ]
null
null
null
from props.dependency_tree.definitions import subject_dependencies, ARG_LABEL,\
    object_dependencies, SOURCE_LABEL, domain_label, POSSESSED_LABEL,\
    POSSESSOR_LABEL


class Proposition:
    def __init__(self, pred, args, outputType):
        self.pred = pred
        self.args = args
        self.outputType = outputType
        for ent in self.args:
            (rel, arg) = ent
            if rel == POSSESSOR_LABEL:
                ent[1] = fixPossessor(arg)

    def find_ent(self, ent):
        ret = []
        for i, (rel, arg) in enumerate(self.args):
            if ent in arg:
                ret.append(i)
        return ret

    def rel_order(self, rel):
        if rel in subject_dependencies + [domain_label, POSSESSED_LABEL,
                                          POSSESSOR_LABEL]:
            return 0
        if rel == ARG_LABEL:
            return 1
        if rel in object_dependencies:
            return 2
        if rel.startswith("prep"):
            return 3
        if rel == SOURCE_LABEL:
            return 5
        else:
            return 4

    def __str__(self):
        PDF = (self.outputType == "pdf")
        HTML = (self.outputType == "html")
        if PDF:
            bold = lambda t: t
            color = lambda t, color: t
        if HTML:
            bold = lambda t: "<b>{0}</b>".format(t)
            color = lambda t, color: '<font color="{0}">{1}</font>'.format(color, t)
        curProp = r'{0}:({1})'.format(
            bold(self.pred),
            ", ".join([rel + ":" + bold(color(arg, "blue"))
                       for rel, arg in sorted(self.args,
                                              key=lambda(rel, _): self.rel_order(rel))]))
        return curProp


mapPossessive = {"my": "I",
                 "your": "you",
                 "its": "it",
                 "her": "she",
                 "his": "he",
                 "our": "we",
                 "their": "they"}


def fixPossessor(possessor):
    """
    fix phrasing in a given possessor node, such as
    "its -> it" "her -> she" "his -> he", etc.
    """
    return mapPossessive.get(possessor.lower().lstrip().rstrip(), possessor)
#     if not (len(possessor.text) == 1):
#         return
#
#     curWord = possessor.text[0].word.lower()
#     possessor.text = [Word(index=possessor.text[0].index,
#                            word=mapPossessive.get(curWord, curWord))]
33.263889
159
0.499791
264
2,395
4.42803
0.352273
0.025663
0.034217
0.042772
0.124893
0.094098
0
0
0
0
0
0.009947
0.370355
2,395
71
160
33.732394
0.765252
0.095198
0
0
0
0
0.053057
0.011117
0
0
0
0
0
0
null
null
0
0.019231
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
934e7112faa4303ecb5d8b5fd5fd635de5c0afd2
568
py
Python
openpype/hosts/hiero/plugins/publish/integrate_version_up_workfile.py
jonclothcat/OpenPype
d1208cbebc0a7f378de0062ccd653295c6399195
[ "MIT" ]
87
2021-05-07T08:40:46.000Z
2022-03-19T00:36:25.000Z
openpype/hosts/hiero/plugins/publish/integrate_version_up_workfile.py
zafrs/OpenPype
4b8e7e1ed002fc55b31307efdea70b0feaed474f
[ "MIT" ]
1,019
2021-04-26T06:22:56.000Z
2022-03-31T16:30:43.000Z
openpype/hosts/hiero/plugins/publish/integrate_version_up_workfile.py
zafrs/OpenPype
4b8e7e1ed002fc55b31307efdea70b0feaed474f
[ "MIT" ]
33
2021-04-29T12:35:54.000Z
2022-03-25T14:48:42.000Z
from pyblish import api
import openpype.api as pype


class IntegrateVersionUpWorkfile(api.ContextPlugin):
    """Save as new workfile version"""

    order = api.IntegratorOrder + 10.1
    label = "Version-up Workfile"
    hosts = ["hiero"]
    optional = True
    active = True

    def process(self, context):
        project = context.data["activeProject"]
        path = context.data.get("currentFile")
        new_path = pype.version_up(path)

        if project:
            project.saveAs(new_path)
            self.log.info("Project workfile was versioned up")
23.666667
58
0.653169
66
568
5.575758
0.621212
0.048913
0
0
0
0
0
0
0
0
0
0.007009
0.246479
568
23
59
24.695652
0.852804
0.049296
0
0
0
0
0.151685
0
0
0
0
0
0
1
0.066667
false
0
0.133333
0
0.6
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
935e6905b97081210e7bed7902277443d7f5464d
3,201
py
Python
tunacell/plotting/defs.py
HuggyHugMe/tunacell
5a7a7a58bbb557098d6638e896aa784ecc37d639
[ "MIT" ]
null
null
null
tunacell/plotting/defs.py
HuggyHugMe/tunacell
5a7a7a58bbb557098d6638e896aa784ecc37d639
[ "MIT" ]
3
2017-08-10T11:19:01.000Z
2019-08-11T11:11:00.000Z
tunacell/plotting/defs.py
HuggyHugMe/tunacell
5a7a7a58bbb557098d6638e896aa784ecc37d639
[ "MIT" ]
5
2017-08-08T22:31:24.000Z
2021-08-06T04:08:08.000Z
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
tunacell package
============

plotting/defs.py module
~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""

DEFAULT_COLORS = ('red', 'blue', 'purple', 'green', 'yellowgreen', 'cyan',
                  'magenta', 'indigo', 'darkorange', 'pink', 'yellow')

colors = DEFAULT_COLORS

# plotting parameters
params = {'length': {'bottom': 1.,
                     'top': 8.,
                     'delta': 2.,
                     'unit': '$\mu$m'},
          'dot_length': {'bottom': 1e-2,
                         'top': 1e-1,
                         'delta': 3e-2,
                         'unit': '$\mu$m/hr'},
          'dotlog_length': {'bottom': 0.5,
                            'top': 2.5,
                            'delta': 0.5,
                            'unit': 'dbs/hr'},
          'width': {'bottom': .5,
                    'top': 1.5,
                    'delta': .2,
                    'unit': '$\mu$m'},
          'fluo': {'bottom': 1e5,
                   'top': 2e6,
                   'delta': 5e5,
                   'unit': 'A.U.'},
          'dot_fluo': {'bottom': 1e2,
                       'top': 5e4,
                       'delta': 1e4,
                       'unit': 'A.U./hr'},
          'dotlog_fluo': {'bottom': 0.1,
                          'top': 3,
                          'delta': 0.5,
                          'unit': 'dbs/hr'},
          'concentration': {'bottom': 2e5,
                            'top': 5e5,
                            'delta': 1e5},
          'volume': {'bottom': 0.,
                     'top': 4.,
                     'delta': 1.,
                     'unit': '$\mu$m$^3$'},
          'area': {'bottom': 1.,
                   'top': 8.,
                   'delta': 2.,
                   'unit': '$\mu$m$^2$'},
          'dotlog_area': {'bottom': 0.5,
                          'top': 2.5,
                          'delta': 0.5,
                          'unit': 'dbs/hr'},
          'density': {'bottom': 1e5,
                      'top': 4e5,
                      'delta': 1e5},
          'ALratio': {'bottom': .1,
                      'top': 1.5,
                      'delta': .4,
                      'unit': '$\mu$m'},
          'age': {'bottom': 0.,
                  'top': 1.}
          }


def get_params(obs, params, *keys):
    return [params[obs][k] for k in keys]
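A short usage sketch for the lookup helper at the end of the module:

# Look up several plotting parameters for one observable at once.
bottom, top, delta = get_params('length', params, 'bottom', 'top', 'delta')
# -> [1.0, 8.0, 2.0], e.g. for setting axis limits and tick spacing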
31.693069
74
0.219931
197
3,201
3.532995
0.370558
0.051724
0.060345
0.045977
0.193966
0.175287
0.152299
0.152299
0.152299
0.083333
0
0.059423
0.63199
3,201
101
75
31.693069
0.531409
0.045611
0
0.287356
0
0
0.159225
0
0
0
0
0
0
1
0.011494
false
0
0
0.011494
0.022989
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
93625b79b6238b332e1234a333a23eb72571d1f2
4,034
py
Python
tordatahub/tests/create_topics.py
jasonz93/python-tordatahub
3a9a497d5a0bebf915d7e24049dd8b06099e3c04
[ "Apache-2.0" ]
null
null
null
tordatahub/tests/create_topics.py
jasonz93/python-tordatahub
3a9a497d5a0bebf915d7e24049dd8b06099e3c04
[ "Apache-2.0" ]
null
null
null
tordatahub/tests/create_topics.py
jasonz93/python-tordatahub
3a9a497d5a0bebf915d7e24049dd8b06099e3c04
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

import sys
import traceback

from tordatahub import DataHub
from tordatahub.utils import Configer
from tordatahub.models import Project, Topic, RecordType, FieldType, RecordSchema, TupleRecord, CursorType
from tordatahub.errors import DatahubException, ObjectAlreadyExistException

configer = Configer('tordatahub.ini')
access_id = configer.get('tordatahub', 'access_id', '')
access_key = configer.get('tordatahub', 'access_key', '')
endpoint = configer.get('tordatahub', 'endpoint', '')
project_name = configer.get('tordatahub', 'project_name', 'meter_project_test')
topic_name = configer.get('tordatahub', 'topic_name', 'meter_topic_test')

print "======================================="
print "access_id: %s" % access_id
print "access_key: %s" % access_key
print "endpoint: %s" % endpoint
print "project_name: %s" % project_name
print "topic_name: %s" % topic_name
print "=======================================\n\n"

if not access_id or not access_key or not endpoint:
    print "access_id and access_key and endpoint must be set!"
    sys.exit(-1)

dh = DataHub(access_id, access_key, endpoint)

try:
    for pi in range(1, 10):
        project_name = "meter_project_test_%d" % pi
        project = Project(name=project_name, comment="meter project test")
        try:
            dh.create_project(project)
            print "create project %s success!" % project_name
            print "=======================================\n\n"
        except ObjectAlreadyExistException, e:
            print "project %s already exist!" % project_name

        for ti in range(1, 100):
            topic_name = "meter_topic_test_%d_%d" % (pi, ti)
            topic = Topic(name=topic_name)
            topic.project_name = project_name
            topic.shard_count = 20
            topic.life_cycle = 7
            topic.record_type = RecordType.TUPLE
            topic.record_schema = RecordSchema.from_lists(
                ['bigint_field', 'string_field', 'double_field', 'bool_field', 'time_field'],
                [FieldType.BIGINT, FieldType.STRING, FieldType.DOUBLE, FieldType.BOOLEAN, FieldType.TIMESTAMP])
            try:
                dh.create_topic(topic)
                print "create topic %s success!" % topic_name

                # block until all shards are in the ready state
                dh.wait_shards_ready(project_name, topic_name)
                print "shards all ready!!!"

                shards = dh.list_shards(project_name, topic_name)
                for shard in shards:
                    record = TupleRecord(schema=topic.record_schema,
                                         values=[1, 'yc1', 10.01, True, 1455869335000000])
                    record.shard_id = shard.shard_id
                    record.put_attribute('AK', '47')
                    records = []
                    records.append(record)
                    failed_indexs = dh.put_records(project_name, topic_name, records)
                    print "put record to project:%s topic:%s failed_index:%s" % (project_name, topic_name, failed_indexs)
            except ObjectAlreadyExistException, e:
                print "topic %s already exist!" % topic_name
                print "=======================================\n\n"
except Exception, e:
    print traceback.format_exc()
    sys.exit(-1)
45.840909
231
0.645513
491
4,034
5.150713
0.350306
0.065243
0.041518
0.031633
0.061289
0
0
0
0
0
0
0.013091
0.223599
4,034
87
232
46.367816
0.794381
0.203024
0
0.163934
0
0
0.226533
0.06602
0
0
0
0
0
0
null
null
0
0.098361
null
null
0.278689
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
936b0f95e7f3ca1f6eafc51508f66b72158c5fa4
1,247
py
Python
python/misc/switcharoo.py
christopher-burke/warmups
140c96ada87ec5e9faa4622504ddee18840dce4a
[ "MIT" ]
null
null
null
python/misc/switcharoo.py
christopher-burke/warmups
140c96ada87ec5e9faa4622504ddee18840dce4a
[ "MIT" ]
2
2022-03-10T03:49:14.000Z
2022-03-14T00:49:54.000Z
python/misc/switcharoo.py
christopher-burke/warmups
140c96ada87ec5e9faa4622504ddee18840dce4a
[ "MIT" ]
null
null
null
#!/usr/bin/env python3
"""Switcharoo.

Create a function that takes a string and returns a new string with its
first and last characters swapped, except under three conditions:

If the length of the string is less than two, return "Incompatible.".
If the argument is not a string, return "Incompatible.".
If the first and last characters are the same, return "Two's a pair.".

Source: https://edabit.com/challenge/tnKZCAkdnZpiuDiWA
"""


def flip_end_chars(txt):
    """Flip the first and last characters if txt is a string."""
    if isinstance(txt, str) and txt and len(txt) > 1:
        first, last = txt[0], txt[-1]
        if first == last:
            return "Two's a pair."
        return "{}{}{}".format(last, txt[1:-1], first)
    return "Incompatible."


def main():
    assert flip_end_chars("Cat, dog, and mouse.") == ".at, dog, and mouseC"
    assert flip_end_chars("Anna, Banana") == "anna, BananA"
    assert flip_end_chars("[]") == "]["
    assert flip_end_chars("") == "Incompatible."
    assert flip_end_chars([1, 2, 3]) == "Incompatible."
    assert flip_end_chars("dfdkf49824fdfdfjhd") == "Two's a pair."
    assert flip_end_chars("#343473847#") == "Two's a pair."
    print('Passed.')


if __name__ == "__main__":
    main()
31.175
75
0.655172
181
1,247
4.381215
0.403315
0.070618
0.121059
0.15889
0.176545
0
0
0
0
0
0
0.023023
0.198877
1,247
39
76
31.974359
0.770771
0.386528
0
0
0
0
0.260292
0
0
0
0
0
0.388889
1
0.111111
false
0.055556
0
0
0.277778
0.055556
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
936be74b5161c4ea335dcc47745288106a326b76
244
py
Python
1/one.py
TheFrederick-git/adventofcode2021
a320f3bba2655afab1aad8bf2520ccb705b2fd1e
[ "MIT" ]
null
null
null
1/one.py
TheFrederick-git/adventofcode2021
a320f3bba2655afab1aad8bf2520ccb705b2fd1e
[ "MIT" ]
null
null
null
1/one.py
TheFrederick-git/adventofcode2021
a320f3bba2655afab1aad8bf2520ccb705b2fd1e
[ "MIT" ]
null
null
null
"""1/1 adventofcode""" with open("input.txt", "r", encoding="UTF-8") as i_file: data = list(map(int, i_file.read().splitlines())) values = ["i" if data[i] > data[i - 1] else "d" for i in range(1, len(data))] print(values.count("i"))
34.857143
78
0.598361
43
244
3.348837
0.674419
0.069444
0
0
0
0
0
0
0
0
0
0.02451
0.163934
244
6
79
40.666667
0.681373
0.065574
0
0
0
0
0.083333
0
0
0
0
0
0
1
0
false
0
0
0
0
0.25
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
93700816a23a94674ae0ee1adf228796c45d26ba
740
py
Python
CMS/test/mocks/search_mocks.py
office-for-students/wagtail-CMS
98789c279edf48f2bbedb5415437da3317f0e12b
[ "MIT" ]
4
2019-06-04T07:18:44.000Z
2020-06-15T22:27:36.000Z
CMS/test/mocks/search_mocks.py
office-for-students/wagtail-CMS
98789c279edf48f2bbedb5415437da3317f0e12b
[ "MIT" ]
38
2019-05-09T13:14:56.000Z
2022-03-12T00:54:57.000Z
CMS/test/mocks/search_mocks.py
office-for-students/wagtail-CMS
98789c279edf48f2bbedb5415437da3317f0e12b
[ "MIT" ]
3
2019-09-26T14:32:36.000Z
2021-05-06T15:48:01.000Z
import json

from requests.models import Response
from http import HTTPStatus

from CMS.test.mocks.search_mocks_content import content


class SearchMocks:

    @classmethod
    def get_search_response_content(cls):
        return content

    @classmethod
    def get_successful_search_response(cls):
        response = Response()
        response.status_code = HTTPStatus.OK
        response._content = json.dumps(cls.get_search_response_content()).encode('utf-8')
        return response

    @classmethod
    def get_unsuccessful_search_response(cls):
        response = Response()
        response.status_code = HTTPStatus.INTERNAL_SERVER_ERROR
        response._content = json.dumps(None).encode('utf-8')
        return response
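A hedged sketch of how these factories might be consumed in a test; the patch target below is hypothetical and only stands in for whatever code issues the real search request:

from unittest import mock

from CMS.test.mocks.search_mocks import SearchMocks

# 'myapp.search.query' is a placeholder patch target, not a real module path.
with mock.patch('myapp.search.query',
                return_value=SearchMocks.get_successful_search_response()):
    pass  # code under test now receives a 200 Response with canned content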
27.407407
89
0.714865
85
740
5.988235
0.388235
0.11002
0.100196
0.094303
0.333988
0.239686
0.239686
0.239686
0.239686
0
0
0.003425
0.210811
740
26
90
28.461538
0.868151
0
0
0.35
0
0
0.013514
0
0
0
0
0
0
1
0.15
false
0
0.2
0.05
0.55
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
9371f372d231442d071f230b9b470e2409d71503
3,157
py
Python
scripts/pughpore/passagetime-simple.py
jhwnkim/nanopores
98b3dbb5d36464fbdc03f59d224d38e4255324ce
[ "MIT" ]
8
2016-09-07T01:59:31.000Z
2021-03-06T12:14:31.000Z
scripts/pughpore/passagetime-simple.py
jhwnkim/nanopores
98b3dbb5d36464fbdc03f59d224d38e4255324ce
[ "MIT" ]
null
null
null
scripts/pughpore/passagetime-simple.py
jhwnkim/nanopores
98b3dbb5d36464fbdc03f59d224d38e4255324ce
[ "MIT" ]
4
2017-12-06T17:43:01.000Z
2020-05-01T05:41:14.000Z
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

# (c) 2017 Gregor Mitscha-Baude
# TODO: obtain rD from actual simulation
from nanopores import fields, kT, eta, qq, savefigs
from numpy import exp, pi, sqrt, linspace, diff, array, dot

L = 46e-9  # length of pore
r = 2.0779e-9  # radius of protein trypsin
V = 0.08  # applied potential
E = V/L  # electric field
rD = 0.2
#D = rD* kT/(6.*pi*eta*r) # diffusion constant (Stokes)

# load translocation events without binding
name = "events3_nobind_new"
fields.set_dir_mega()
data = fields.get_fields(name)

# take only events that translocated
data.pop("b1")
data.pop("b2")
data, _ = fields._subset(data, data["ood"], lambda x: x == 0)
data, times = fields._sorted(data, data["t"])

print "mean"
D = array(data["Dzavg"]).mean() * 1e-9
F = -array(data["Fzavg"]).mean()
v = D/kT * F  # electrophoretic velocity
print "D = ", D, "F = ", F, "v = ", v

print "at x = (0,0,0)"
D = 6.8e-12
F = 1.5e-11
v = D/kT * F  # electrophoretic velocity
print "D = ", D, "F = ", F, "v = ", v


def mean(lst):
    return sum(lst)/float(len(lst))


def maximum_likelihood(times, n=10):
    times = 1e-3 * array(times)
    T = mean(times)
    Tinv = mean([1./t for t in times])

    def amean(v):
        return mean([1./(1. + L/(v*t)) for t in times])

    def fix(v):
        a = amean(v)
        factor = (sqrt((a-.5)**2 + T*Tinv*a*(1-a)) - (a-.5))/(1-a)
        print a
        #print factor
        return L/T * factor

    v = L*sqrt(Tinv/T)  # this initial guess is accurate to 1e-7!!
    for i in range(n):
        v0 = v
        #print "i = %d: v = %s" % (i, v)
        v = fix(v)
        print "i = %d: dv = %s" % (i, abs(v-v0))

    D = v**2/2.*T - v*L + L**2/2.*Tinv
    return v, D


v, D = maximum_likelihood(times)
print "maximum likelihood"
print "D = ", D, "F = ", v*kT/D, "v = ", v


# simple 1D model from Talaga2009
def p(t, timescale=1.):
    # timescale: 1 -> s, 1e-3 -> ms etc
    t *= timescale
    return exp(-(L - t*v)**2/(4.*t*D)) * (L + t*v) / (4.*t * sqrt(pi*t*D))


def pp(times, timescale=1.):
    return array([p(t, timescale) for t in times])


def integrate(t, pt):
    pt = array(pt)
    dt = diff(t)
    values = 0.5*(pt[:-1] + pt[1:])
    return dot(values, dt)


def integrate_hist(hist):
    n, bins, _ = hist
    dt = diff(bins)
    return dot(n, dt)


# scale times
scale = 1e-6  # microseconds
times = [t*1e-3/scale for t in times]

from matplotlib import pyplot as plt
t = linspace(1e-9/scale, 8e-6/scale, 500)

hist = plt.hist(times, bins=30, color="#aaaaff", linewidth=0.5,
                weights=[1./500.]*len(times), label="BD simulations")
pt = pp(t, scale) * integrate_hist(hist) * scale
plt.plot(t, pt, "-", color="g", linewidth=3, label="FPT model")
plt.legend(loc="upper right", frameon=False)
plt.xlabel(u"dwell time [µs]")
plt.ylabel(u"rel. frequency")

print "integral", integrate_hist(hist), "==", integrate(t, pt)

#plt.figure()
#plt.hist(data["Fzavg"], bins=30, color="#aaaaff", linewidth=0.5)
#
#plt.figure()
#plt.hist(data["Dzavg"], bins=30, color="#aaaaff", linewidth=0.5)

from folders import FIGDIR
savefigs("current-nobind-hist", FIGDIR + "/rw")
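Written out, the density implemented by p(t) above (the first-passage-time model attributed to Talaga 2009, with pore length L, drift velocity v and diffusivity D) is

p(t) = \frac{L + vt}{4t\sqrt{\pi D t}}\,\exp\!\left(-\frac{(L - vt)^2}{4Dt}\right),

which matches the returned expression term for term.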
25.877049
74
0.596136
533
3,157
3.49531
0.347092
0.004294
0.012882
0.023618
0.132045
0.10306
0.086957
0.041868
0.041868
0.041868
0
0.041146
0.214761
3,157
121
75
26.090909
0.710367
0.21191
0
0.051948
0
0
0.092314
0
0
0
0
0.008264
0
0
null
null
0
0.064935
null
null
0.116883
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
937335c45d766563fe6c185b76d6ad2c9b3fea7a
652
py
Python
examples/plots/plot_quaternion_integrate.py
Mateus224/pytransform3d-1
26f1d39c5fa5f5c400fdabc7e58f645c7a35bee5
[ "BSD-3-Clause" ]
null
null
null
examples/plots/plot_quaternion_integrate.py
Mateus224/pytransform3d-1
26f1d39c5fa5f5c400fdabc7e58f645c7a35bee5
[ "BSD-3-Clause" ]
null
null
null
examples/plots/plot_quaternion_integrate.py
Mateus224/pytransform3d-1
26f1d39c5fa5f5c400fdabc7e58f645c7a35bee5
[ "BSD-3-Clause" ]
null
null
null
""" ====================== Quaternion Integration ====================== Integrate angular velocities to a sequence of quaternions. """ import numpy as np import matplotlib.pyplot as plt from pytransform3d.rotations import quaternion_integrate, matrix_from_quaternion, plot_basis angular_velocities = np.empty((21, 3)) angular_velocities[:, :] = np.array([np.sqrt(0.5), np.sqrt(0.5), 0.0]) angular_velocities *= np.pi Q = quaternion_integrate(angular_velocities, dt=0.1) ax = None for t in range(len(Q)): R = matrix_from_quaternion(Q[t]) p = 2 * (t / (len(Q) - 1) - 0.5) * np.ones(3) ax = plot_basis(ax=ax, s=0.15, R=R, p=p) plt.show()
27.166667
92
0.654908
101
652
4.108911
0.475248
0.204819
0.137349
0.038554
0
0
0
0
0
0
0
0.035651
0.139571
652
23
93
28.347826
0.7041
0.196319
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.230769
0
0.230769
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
937962d2ebcdc2a6b9460ca91cb7e76e7ac6b49e
1,434
py
Python
what.py
manastech/de-bee
32d4084b7d765b2766a7b8b947e896bb6f81beb9
[ "MIT" ]
1
2015-07-30T07:37:20.000Z
2015-07-30T07:37:20.000Z
what.py
manastech/de-bee
32d4084b7d765b2766a7b8b947e896bb6f81beb9
[ "MIT" ]
null
null
null
what.py
manastech/de-bee
32d4084b7d765b2766a7b8b947e896bb6f81beb9
[ "MIT" ]
null
null
null
from google.appengine.ext import webapp
from wsgiref.handlers import CGIHandler

from model import Membership
from model import Group
from model import Transaction


class WhatHandler(webapp.RequestHandler):

    def get(self):
        page = self.request.get('p')
        if page is None or page == '':
            page = 1
        else:
            page = int(page)
        offset = (page - 1) * 20

        if page != 1:
            self.response.out.write("<a href=\"?p=%s\">Previous</a> | " % (page - 1))
        self.response.out.write(" &nbsp; %s &nbsp; " % page)
        self.response.out.write(" | <a href=\"?p=%s\">Next</a>" % (page + 1))
        self.response.out.write("<br/><br/>")

        self.response.out.write("<ul>")
        for tr in Transaction.gql("ORDER BY date DESC LIMIT %s, %s" % (offset, 20)):
            try:
                self.response.out.write("<li>In %s: %s <b>%s</b> %s ($%s due to \"%s\", %s)</li>" % (
                    tr.group.name, tr.fromMember.userNick, tr.type,
                    tr.toMember.userNick, tr.amount, tr.reason, tr.date))
            except:
                self.response.out.write("<li style=\"color:blue\">Group must have been deleted...</li>")
        self.response.out.write("</ul>")


def main():
    application = webapp.WSGIApplication([
        ('/what', WhatHandler),
    ], debug=True)
    CGIHandler().run(application)
30.510638
93
0.535565
179
1,434
4.290503
0.435754
0.125
0.15625
0.208333
0.259115
0.144531
0.138021
0.070313
0
0
0
0.008911
0.295676
1,434
47
94
30.510638
0.751485
0
0
0
0
0.027027
0.156947
0
0
0
0
0
0
1
0.054054
false
0
0.135135
0
0.216216
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fa7a83fe53cfeb8f0635e9e1cf133b811d5394c2
3,713
py
Python
app/auth/forms.py
blazejosojca/flask_blog
2bac2f9c8bb60db8a5073147f35b9e088c97497b
[ "MIT" ]
null
null
null
app/auth/forms.py
blazejosojca/flask_blog
2bac2f9c8bb60db8a5073147f35b9e088c97497b
[ "MIT" ]
1
2021-06-01T23:21:11.000Z
2021-06-01T23:21:11.000Z
app/auth/forms.py
blazejosojca/flask_blog
2bac2f9c8bb60db8a5073147f35b9e088c97497b
[ "MIT" ]
null
null
null
from flask_wtf import FlaskForm
from flask_wtf.file import FileAllowed, FileField
from flask_babel import lazy_gettext as _l
from wtforms import StringField, TextAreaField, SubmitField, PasswordField, BooleanField
from wtforms.validators import DataRequired, Email, ValidationError, Length, EqualTo

from app.models import User


class RegistrationForm(FlaskForm):
    # validators must be passed to the field itself; putting them inside the
    # _l() call would silently discard them
    username = StringField(_l('Username'),
                           validators=[DataRequired(), Length(min=2, max=24)])
    email = StringField('Email', validators=[DataRequired(), Email()])
    password = PasswordField(_l('Password'), validators=[DataRequired()])
    password_confirmation = PasswordField(
        _l('Confirm Password'),
        validators=[DataRequired(), EqualTo('password')])
    submit = SubmitField(_l('Sign Up'))

    def validate_username(self, username):
        user = User.query.filter_by(username=username.data).first()
        if user is not None:
            raise ValidationError(
                _l('This username already exists. Please use a different username!')
            )

    def validate_email(self, email):
        user = User.query.filter_by(email=email.data).first()
        if user is not None:
            raise ValidationError(_l('This email already exists. Please use a different email!'))


class LoginForm(FlaskForm):
    email = StringField('Email', validators=[DataRequired(), Email()])
    password = PasswordField(_l('Password'), validators=[DataRequired()])
    remember = BooleanField(_l('Remember Me'))
    submit = SubmitField(_l('Sign In'))


class UpdateUserForm(FlaskForm):
    username = StringField(_l('Username'), validators=[DataRequired(), Length(min=2, max=24)])
    email = StringField('Email', validators=[DataRequired(), Email()])
    about_me = TextAreaField(_l('About me'), validators=[Length(min=0, max=140)])
    image_file = FileField(_l('Update profile picture'),
                           validators=[FileAllowed(['jpg', 'jpeg', 'png'])])
    submit = SubmitField(_l('Submit'))

    def __init__(self, original_username, original_email, *args, **kwargs):
        super(UpdateUserForm, self).__init__(*args, **kwargs)
        self.original_username = original_username
        self.original_email = original_email

    def validate_username(self, username):
        # compare with `!=`; identity checks on strings are unreliable
        if username.data != self.original_username:
            user = User.query.filter_by(username=self.username.data).first()
            if user is not None:
                raise ValidationError(_l('Please use a different username!'))

    def validate_email(self, email):
        if email.data != self.original_email:
            user = User.query.filter_by(email=self.email.data).first()
            if user is not None:
                raise ValidationError(_l('Please use a different email!'))


class RequestResetForm(FlaskForm):
    email = StringField('Email', validators=[DataRequired(), Email()])
    submit = SubmitField(_l('Request Password Reset'))

    def validate_email(self, email):
        user = User.query.filter_by(email=email.data).first()
        if user is None:
            raise ValidationError('There is no account with this email.')


class ResetPasswordForm(FlaskForm):
    password = PasswordField('Password', validators=[DataRequired()])
    password_confirmation = PasswordField('Confirm Password',
                                          validators=[DataRequired(), EqualTo('password')])
    submit = SubmitField('Reset Password')


class DeleteUserForm(FlaskForm):
    username = StringField('Username',
                           validators=[DataRequired(), Length(min=2, max=24)])
    submit = SubmitField('Delete User')
43.174419
111
0.664153
395
3,713
6.113924
0.23038
0.109317
0.062112
0.039337
0.597516
0.557764
0.491097
0.42029
0.344513
0.344513
0
0.004477
0.217883
3,713
85
112
43.682353
0.827135
0
0
0.227273
0
0
0.120926
0
0
0
0
0
0
1
0.090909
false
0.151515
0.090909
0
0.590909
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
fa81fc214c9e7372cdac762542256af383055ca7
1,232
py
Python
scrapy/arxiv1.py
SaeedPourjafar/ws_2021
6e91e70fc8f40007eb3c4d68282aa86b80b79363
[ "MIT" ]
null
null
null
scrapy/arxiv1.py
SaeedPourjafar/ws_2021
6e91e70fc8f40007eb3c4d68282aa86b80b79363
[ "MIT" ]
null
null
null
scrapy/arxiv1.py
SaeedPourjafar/ws_2021
6e91e70fc8f40007eb3c4d68282aa86b80b79363
[ "MIT" ]
null
null
null
# Please note that since the number of topics in computer science is exactly 40 and it's less than 100,
# we applied the limit on the second file (arxiv2.py) which has somewhere around 700-800 outputs.
# To run this file please put it in the spiders folder and run the code below in terminal/cmd:
# scrapy crawl topics -o topics.csv

import scrapy
import psutil  # For memory usage
import os


class Link(scrapy.Item):
    link = scrapy.Field()


class LinkListsSpider(scrapy.Spider):
    name = 'topics'
    # allowed_domains expects bare domain names, not URLs
    allowed_domains = ['arxiv.org']
    start_urls = ['https://arxiv.org']

    def parse(self, response):
        # We are looking for the list of topics under Computer Science section
        # i.e. from 'Artificial Intelligence' all the way to 'Systems and Control'
        xpath = '//h2/following-sibling::h2[contains(text(),"Computer Science")]/following-sibling::ul/li/a[re:test(@id, "cs\..*")]/@href'
        selection = response.xpath(xpath)
        for s in selection:
            l = Link()
            l['link'] = 'https://arxiv.org' + s.get()
            yield l
        print("Memory usage in MB:", round(psutil.Process(os.getpid()).memory_info().rss / 1024 ** 2, 2))
39.741935
139
0.650162
177
1,232
4.508475
0.644068
0.056391
0.048872
0
0
0
0
0
0
0
0
0.021186
0.233766
1,232
30
140
41.066667
0.824153
0.400162
0
0
0
0.058824
0.285714
0.145714
0
0
0
0
0
1
0.058824
false
0
0.176471
0
0.588235
0.058824
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
fa9523caf9ac0424eb71bc4eef12115550d20826
3,738
py
Python
scripts/gen_app_yaml.py
MatthewWilkes/mw4068-packaging
5c5d50eea89372e967994dac3bd8b06d25b4f0fa
[ "Apache-2.0" ]
null
null
null
scripts/gen_app_yaml.py
MatthewWilkes/mw4068-packaging
5c5d50eea89372e967994dac3bd8b06d25b4f0fa
[ "Apache-2.0" ]
null
null
null
scripts/gen_app_yaml.py
MatthewWilkes/mw4068-packaging
5c5d50eea89372e967994dac3bd8b06d25b4f0fa
[ "Apache-2.0" ]
null
null
null
#! /usr/bin/env python2.5
# Copyright 2009 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""gen_app_yaml.py [-f] [-o] (-i | APPLICATION_NAME)

A script to generate the app.yaml from the template with an application
name filled in.

Arguments:
  APPLICATION_NAME: the name to use for the application (no underscores)
"""

from __future__ import with_statement

__authors__ = [
    # alphabetical order by last name, please
    '"Dan Bentley" <dbentley@google.com>',
]

import os
import sys

from optparse import OptionParser


def generateAppYaml(application_name, force=False, override_version=None):
  """Generate the app.yaml file.

  Args:
    application_name: str, the name to write into the application field
    force: bool, whether to overwrite an existing app.yaml
    override_version: str, the manual version to use
  """
  scripts_directory = os.path.dirname(__file__)
  app_dir = os.path.abspath(os.path.join(scripts_directory, '../app'))
  template_path = os.path.join(app_dir, 'app.yaml.template')
  app_yaml_path = os.path.join(app_dir, 'app.yaml')

  if not os.path.exists(template_path):
    sys.exit("Template file %s non-existent. Corrupt client?" % template_path)

  if os.path.exists(app_yaml_path):
    if not force:
      sys.exit("%s exists; exiting. To overwrite, pass -f on the command-line"
               % app_yaml_path)

  with open(template_path) as infile:
    template_contents = infile.read()

  contents = template_contents.replace(
      '# application: FIXME',
      'application: ' + application_name)

  if override_version:
    # find the "version" field
    stop = contents.find("version: ")
    # find the next \n after it
    end = contents.find("\n", stop)
    # insert new version (9 == len("version: "))
    app_yaml_contents = contents[:stop+9] + override_version + contents[end:]
  else:
    app_yaml_contents = contents

  with open(app_yaml_path, 'w') as outfile:
    outfile.write(app_yaml_contents)

  print "Wrote application name %s to %s." % (application_name, app_yaml_path)


def usage(msg):
  """Print an error message and the usage of the program; then quit.
  """
  sys.exit('Error: %s\n\n%s' % (msg, __doc__))


def main():
  """Main program.
  """
  args = sys.argv[1:]  # strip off the binary name

  parser = OptionParser(usage=__doc__)
  parser.add_option("-f", "--force", action="store_true", default=False,
                    help="Overwrite existing app.yaml")
  parser.add_option("-i", "--interactive", action="store_true", default=False,
                    help="Ask for the application name interactively")
  parser.add_option("-o", "--override-version",
                    help="Uses the specified version instead of the one from app.yaml.template")

  options, args = parser.parse_args(args)

  if options.interactive:
    if args:
      parser.error("Cannot combine application name with -i")
    sys.stdout.write("Application name: ")
    application_name = sys.stdin.readline().strip()
  else:
    if len(args) != 1:
      parser.error("No application name supplied.")
    application_name = args[0]

  generateAppYaml(application_name, force=options.force,
                  override_version=options.override_version)


if __name__ == '__main__':
  main()
30.639344
96
0.697967
518
3,738
4.88417
0.378378
0.044269
0.021739
0.012648
0.04585
0.04585
0.021344
0.021344
0
0
0
0.004633
0.191546
3,738
121
97
30.892562
0.832561
0.191546
0
0.035088
0
0
0.228571
0.008571
0
0
0
0
0
0
null
null
0.017544
0.070175
null
null
0.017544
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
fa96f592ecf04753046dc1b36c84fb0196c05868
11,242
py
Python
gputools/transforms/transformations.py
jni/gputools
cecaa9565b9be0c716e6c11b0914bbbdfd75657c
[ "BSD-3-Clause" ]
null
null
null
gputools/transforms/transformations.py
jni/gputools
cecaa9565b9be0c716e6c11b0914bbbdfd75657c
[ "BSD-3-Clause" ]
null
null
null
gputools/transforms/transformations.py
jni/gputools
cecaa9565b9be0c716e6c11b0914bbbdfd75657c
[ "BSD-3-Clause" ]
null
null
null
""" scaling images """ from __future__ import print_function, unicode_literals, absolute_import, division import logging logger = logging.getLogger(__name__) import os import numpy as np import warnings from gputools import OCLArray, OCLImage, OCLProgram from gputools.core.ocltypes import cl_buffer_datatype_dict from gputools.utils import mat4_rotate, mat4_translate from ._abspath import abspath from mako.template import Template def affine(data, mat=np.identity(4), output_shape=None, mode="constant", interpolation="linear", res_g=None): """ affine transform data with matrix mat, which is the inverse coordinate transform matrix (similar to ndimage.affine_transform) Parameters ---------- data, ndarray or OCLImage 3d array to be transformed mat, ndarray or OCLArray 3x3 or 4x4 inverse coordinate transform matrix output_shape: tuple of ints shape of transformed array mode: string boundary mode, one of the following: 'constant' pads with zeros 'edge' pads with edge values 'wrap' pads with the repeated version of the input interpolation, string interpolation mode, one of the following 'linear' 'nearest' Returns ------- res: ndarray or openCL array transformed array (same shape as input) """ warnings.warn( "gputools.transform.affine: API change as of gputools>= 0.2.8: the inverse of the matrix is now used as in scipy.ndimage.affine_transform") if data.ndim != 3: raise ValueError("input data has to be a 3d array!") interpolation_defines = {"linear": ["-D", "SAMPLER_FILTER=CLK_FILTER_LINEAR"], "nearest": ["-D", "SAMPLER_FILTER=CLK_FILTER_NEAREST"]} mode_defines = {"constant": ["-D", "SAMPLER_ADDRESS=CLK_ADDRESS_CLAMP"], "wrap": ["-D", "SAMPLER_ADDRESS=CLK_ADDRESS_REPEAT"], "edge": ["-D", "SAMPLER_ADDRESS=CLK_ADDRESS_CLAMP_TO_EDGE"] } if not interpolation in interpolation_defines: raise KeyError( "interpolation = '%s' not defined ,valid: %s" % (interpolation, list(interpolation_defines.keys()))) if not mode in mode_defines: raise KeyError("mode = '%s' not defined ,valid: %s" % (mode, list(mode_defines.keys()))) # reorder matrix, such that x,y,z -> z,y,x (as the kernel is assuming that) if output_shape is None: output_shape = data.shape if isinstance(data, OCLImage): d_im = data else: d_im = OCLImage.from_array(data.astype(np.float32, copy=False)) if res_g is None: res_g = OCLArray.empty(output_shape, np.float32) mat_inv_g = OCLArray.from_array(mat.astype(np.float32, copy=False)) prog = OCLProgram(abspath("kernels/affine.cl") , build_options=interpolation_defines[interpolation] + mode_defines[mode]) prog.run_kernel("affine3", output_shape[::-1], None, d_im, res_g.data, mat_inv_g.data) if isinstance(data, OCLImage): return res_g else: return res_g.get() def shift(data, shift=(0, 0, 0), mode="constant", interpolation="linear"): """ translates 3d data by given amount Parameters ---------- data: ndarray 3d array shift : float or sequence The shift along the axes. If a float, `shift` is the same for each axis. If a sequence, `shift` should contain one value for each axis. 
mode: string boundary mode, one of the following: 'constant' pads with zeros 'edge' pads with edge values 'wrap' pads with the repeated version of the input interpolation, string interpolation mode, one of the following 'linear' 'nearest' Returns ------- res: ndarray shifted array (same shape as input) """ if np.isscalar(shift): shift = (shift,) * 3 if len(shift) != 3: raise ValueError("shift (%s) should be of length 3!") shift = -np.array(shift) return affine(data, mat4_translate(*shift), mode=mode, interpolation=interpolation) def rotate(data, axis=(1., 0, 0), angle=0., center=None, mode="constant", interpolation="linear"): """ rotates data around axis by a given angle Parameters ---------- data: ndarray 3d array axis: tuple axis to rotate by angle about axis = (x,y,z) angle: float center: tuple or None origin of rotation (cz,cy,cx) in pixels if None, center is the middle of data mode: string boundary mode, one of the following: 'constant' pads with zeros 'edge' pads with edge values 'wrap' pads with the repeated version of the input interpolation, string interpolation mode, one of the following 'linear' 'nearest' Returns ------- res: ndarray rotated array (same shape as input) """ if center is None: center = tuple([s // 2 for s in data.shape]) cx, cy, cz = center m = np.dot(mat4_translate(cx, cy, cz), np.dot(mat4_rotate(angle, *axis), mat4_translate(-cx, -cy, -cz))) m = np.linalg.inv(m) return affine(data, m, mode=mode, interpolation=interpolation) def map_coordinates(data, coordinates, interpolation="linear", mode='constant'): """ Map data to new coordinates by interpolation. The array of coordinates is used to find, for each point in the output, the corresponding coordinates in the input. should correspond to scipy.ndimage.map_coordinates Parameters ---------- data coordinates output interpolation mode cval prefilter Returns ------- """ if not (isinstance(data, np.ndarray) and data.ndim in (2, 3)): raise ValueError("input data has to be a 2d or 3d array!") coordinates = np.asarray(coordinates, np.int32) if not (coordinates.shape[0] == data.ndim): raise ValueError("coordinate has to be of shape (data.ndim,m) ") interpolation_defines = {"linear": ["-D", "SAMPLER_FILTER=CLK_FILTER_LINEAR"], "nearest": ["-D", "SAMPLER_FILTER=CLK_FILTER_NEAREST"]} mode_defines = {"constant": ["-D", "SAMPLER_ADDRESS=CLK_ADDRESS_CLAMP"], "wrap": ["-D", "SAMPLER_ADDRESS=CLK_ADDRESS_REPEAT"], "edge": ["-D", "SAMPLER_ADDRESS=CLK_ADDRESS_CLAMP_TO_EDGE"] } if not interpolation in interpolation_defines: raise KeyError( "interpolation = '%s' not defined ,valid: %s" % (interpolation, list(interpolation_defines.keys()))) if not mode in mode_defines: raise KeyError("mode = '%s' not defined ,valid: %s" % (mode, list(mode_defines.keys()))) if not data.dtype.type in cl_buffer_datatype_dict: raise KeyError("dtype %s not supported yet (%s)" % (data.dtype.type, tuple(cl_buffer_datatype_dict.keys()))) dtype_defines = ["-D", "DTYPE=%s" % cl_buffer_datatype_dict[data.dtype.type]] d_im = OCLImage.from_array(data) coordinates_g = OCLArray.from_array(coordinates.astype(np.float32, copy=False)) res_g = OCLArray.empty(coordinates.shape[1], data.dtype) prog = OCLProgram(abspath("kernels/map_coordinates.cl") , build_options=interpolation_defines[interpolation] + mode_defines[mode] + dtype_defines) kernel = "map_coordinates{ndim}".format(ndim=data.ndim) prog.run_kernel(kernel, (coordinates.shape[-1],), None, d_im, res_g.data, coordinates_g.data) return res_g.get() def geometric_transform(data, mapping = "c0,c1", output_shape=None, 
mode='constant', interpolation="linear"): """ Apply an arbitrary geometric transform. The given mapping function is used to find, for each point in the output, the corresponding coordinates in the input. The value of the input at those coordinates is determined by spline interpolation of the requested order. Parameters ---------- %(input)s mapping : {callable, scipy.LowLevelCallable} A callable object that accepts a tuple of length equal to the output array rank, and returns the corresponding input coordinates as a tuple of length equal to the input array rank. """ if not (isinstance(data, np.ndarray) and data.ndim in (2, 3)): raise ValueError("input data has to be a 2d or 3d array!") interpolation_defines = {"linear": ["-D", "SAMPLER_FILTER=CLK_FILTER_LINEAR"], "nearest": ["-D", "SAMPLER_FILTER=CLK_FILTER_NEAREST"]} mode_defines = {"constant": ["-D", "SAMPLER_ADDRESS=CLK_ADDRESS_CLAMP"], "wrap": ["-D", "SAMPLER_ADDRESS=CLK_ADDRESS_REPEAT"], "edge": ["-D", "SAMPLER_ADDRESS=CLK_ADDRESS_CLAMP_TO_EDGE"] } if not interpolation in interpolation_defines: raise KeyError( "interpolation = '%s' not defined ,valid: %s" % (interpolation, list(interpolation_defines.keys()))) if not mode in mode_defines: raise KeyError("mode = '%s' not defined ,valid: %s" % (mode, list(mode_defines.keys()))) if not data.dtype.type in cl_buffer_datatype_dict: raise KeyError("dtype %s not supported yet (%s)" % (data.dtype.type, tuple(cl_buffer_datatype_dict.keys()))) dtype_defines = ["-D", "DTYPE={type}".format(type=cl_buffer_datatype_dict[data.dtype.type])] image_functions = {np.float32:"read_imagef", np.uint8: "read_imageui", np.uint16: "read_imageui", np.int32: "read_imagei"} image_read_defines = ["-D","READ_IMAGE=%s"%image_functions[data.dtype.type]] with open(abspath("kernels/geometric_transform.cl"), "r") as f: tpl = Template(f.read()) output_shape = tuple(output_shape) mappings = {"FUNC2": "c1,c0", "FUNC3": "c2,c1,c0"} mappings["FUNC%d" % data.ndim] = ",".join(reversed(mapping.split(","))) rendered = tpl.render(**mappings) d_im = OCLImage.from_array(data) res_g = OCLArray.empty(output_shape, data.dtype) prog = OCLProgram(src_str=rendered, build_options=interpolation_defines[interpolation] + mode_defines[mode] + dtype_defines+image_read_defines) kernel = "geometric_transform{ndim}".format(ndim=data.ndim) prog.run_kernel(kernel, output_shape[::-1], None, d_im, res_g.data) return res_g.get() if __name__ == '__main__': d = np.zeros((200, 200, 200), np.float32) d[20:-20, 20:-20, 20:-20] = 1. # res = translate(d, x = 10, y = 5, z= -10 ) res = rotate(d, center=(100, 100, 100), angle=.5)
33.558209
147
0.610656
1,382
11,242
4.827062
0.181621
0.017988
0.020237
0.024284
0.539799
0.490781
0.460051
0.429171
0.426023
0.391096
0
0.013533
0.276997
11,242
334
148
33.658683
0.80721
0
0
0.384058
0
0.007246
0.204702
0.087934
0
0
0
0
0
0
null
null
0
0.072464
null
null
0.007246
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
fa97031b9abd030801b3037a4db301e08dded608
776
py
Python
index.py
genesis331/fdk-object-detection
b5859a5dea0bc87f7382022d08c3616ecd7d1b85
[ "MIT" ]
null
null
null
index.py
genesis331/fdk-object-detection
b5859a5dea0bc87f7382022d08c3616ecd7d1b85
[ "MIT" ]
null
null
null
index.py
genesis331/fdk-object-detection
b5859a5dea0bc87f7382022d08c3616ecd7d1b85
[ "MIT" ]
null
null
null
import streamlit as st
from streamlit import caching
import os
import torch
from src.core.detect import Detector
from src.core.utils import utils
from PIL import Image
import cv2

st.title('1stDayKit Object Detection')
st.write('1stDayKit is a high-level Deep Learning toolkit for solving generic tasks.')

uploaded_file = st.file_uploader("Choose an image...", type=["png", "jpg"])

if uploaded_file is not None:
    st.spinner()
    with st.spinner(text='Loading...'):
        det = Detector(name="DemoDet")
        img = Image.open(uploaded_file)
        img_cv = utils.pil_to_cv2(img)
        output = det.predict(img_cv)
        out_img = det.visualize(img_cv, output, figsize=(18, 18))
        cv2.imwrite('tempImage.jpg', out_img)
        st.image('tempImage.jpg', width=700)
33.73913
86
0.706186
116
776
4.62931
0.551724
0.067039
0.040968
0
0
0
0
0
0
0
0
0.018779
0.176546
776
23
87
33.73913
0.821596
0
0
0
0
0
0.214929
0
0
0
0
0
0
1
0
false
0
0.380952
0
0.380952
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
fa9727363fedea573e43bf5393f3cdd76d8a0357
632
py
Python
modulo 2/d037/conversao.py
rafa-evangelista/PYTHON
761ec7e01f1617263bc023a6b82b599a936275ee
[ "MIT" ]
null
null
null
modulo 2/d037/conversao.py
rafa-evangelista/PYTHON
761ec7e01f1617263bc023a6b82b599a936275ee
[ "MIT" ]
null
null
null
modulo 2/d037/conversao.py
rafa-evangelista/PYTHON
761ec7e01f1617263bc023a6b82b599a936275ee
[ "MIT" ]
null
null
null
num = int(input('Digite um número: '))
print('''Qual será a base de conversão do número {}
[1] para "binário"
[2] para "octal"
[3] para "hexadecimal"'''.format(num))
num1 = int(input('Escolha uma opção: '))
if num1 == 1:
    print('Você escolheu converter o número {} para binário. O valor é de {}.'.format(
        num, bin(num)))
elif num1 == 2:
    print('Você escolheu converter o número {} para octal. O valor é de {}'.format(
        num, oct(num)))
elif num1 == 3:
    print('Você escolheu converter o número {} para hexadecimal. O valor é de {}'.format(
        num, hex(num)))
else:
    print('Escolha uma opção válida.')
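# --- Hedged mini-demo (illustrative addition, not part of the original
# exercise, whose prompts are in Portuguese): the three conversions above
# reduce to the bin/oct/hex built-ins.
num_demo = 42
print(bin(num_demo))  # 0b101010
print(oct(num_demo))  # 0o52
print(hex(num_demo))  # 0x2a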
33.263158
89
0.628165
95
632
4.178947
0.410526
0.09068
0.128463
0.196474
0.415617
0.415617
0.279597
0
0
0
0
0.02004
0.210443
632
18
90
35.111111
0.775551
0
0
0
0
0
0.571203
0
0
0
0
0
0
1
0
false
0
0
0
0
0.294118
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fa9c176b0c5c5750593310639cb938dd5d20b975
1,789
py
Python
src/tester.py
OompahLoompah/LinodeAPI-Client
6842b5cb461676a5ae363f242c972d8b157ea67e
[ "MIT" ]
null
null
null
src/tester.py
OompahLoompah/LinodeAPI-Client
6842b5cb461676a5ae363f242c972d8b157ea67e
[ "MIT" ]
null
null
null
src/tester.py
OompahLoompah/LinodeAPI-Client
6842b5cb461676a5ae363f242c972d8b157ea67e
[ "MIT" ]
null
null
null
from client import linodeClient
import os

linode = linodeClient(os.getcwd() + '/../.config')

userInput = raw_input("What do you want to do?\n")

if userInput == 'create':
    print(linode.createLinode('3', '1'))

if userInput == 'destroy':
    userInput = raw_input("What do you want to destroy?\n")
    response = linode.listDisks(userInput)
    for disk in response['DATA']:
        linode.deleteDisk(userInput, str(disk['DISKID']))
    print(linode.destroyLinode(userInput))

if userInput == 'cfd':
    linodeID = raw_input("LinodeID: ")
    distro = raw_input("Distro ID: ")
    label = raw_input("Label: ")
    size = raw_input("Size (MB): ")
    password = raw_input("Password: ")
    print(linode.createFromDistro(linodeID, distro, label, size, password))
    linode.createDisk(linodeID, 'swap', '512', 'swap')
    linode.createConfig(linodeID, label, [label, 'swap'])

if userInput == 'config':
    linodeID = raw_input("LinodeID: ")
    label = raw_input("Label: ")
    disks = []
    disk = raw_input("Enter disk ID: ")
    while disk != '':
        disks.append(disk)
        disk = raw_input("Enter disk ID: ")
    print(linode.createConfig(linodeID, label, disks))

if userInput == 'boot':
    vps = raw_input("Which Linode? ")
    print(linode.boot(vps))

if userInput == 'reboot':
    vps = raw_input("Which Linode? ")
    print(linode.reboot(vps))

if userInput == 'shutdown':
    vps = raw_input("Which Linode? ")
    print(linode.shutdown(vps))

if userInput == 'list disks':
    vps = raw_input("Which Linode? ")
    response = linode.listDisks(vps)
    print(response['ERRORARRAY'])
    for disk in response['DATA']:
        print disk['DISKID']

if userInput == 'list IPs':
    vps = raw_input("Which Linode? ")
    response = linode.listIPs(vps)
    print response
28.854839
75
0.643935
216
1,789
5.259259
0.277778
0.112676
0.048415
0.070423
0.284331
0.247359
0.206866
0.056338
0
0
0
0.003492
0.199553
1,789
61
76
29.327869
0.789804
0
0
0.265306
0
0
0.188373
0
0
0
0
0
0
0
null
null
0.040816
0.040816
null
null
0.204082
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
fa9cd956e95a761dad517dc12fcf1239628e18a4
6,694
py
Python
base/migrations/0002_auto_20210622_1947.py
francofgp/Syndeo
888a1001f2cbb2ff8b7247e84a2899dcbd08af80
[ "MIT" ]
3
2022-01-04T17:38:04.000Z
2022-01-05T12:45:22.000Z
base/migrations/0002_auto_20210622_1947.py
francofgp/Syndeo
888a1001f2cbb2ff8b7247e84a2899dcbd08af80
[ "MIT" ]
null
null
null
base/migrations/0002_auto_20210622_1947.py
francofgp/Syndeo
888a1001f2cbb2ff8b7247e84a2899dcbd08af80
[ "MIT" ]
null
null
null
# Generated by Django 3.2.2 on 2021-06-22 22:47

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('base', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Categoria',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(max_length=30, null=True)),
            ],
        ),
        migrations.RemoveField(
            model_name='palabra',
            name='name',
        ),
        migrations.AddField(
            model_name='account',
            name='descripcion',
            field=models.TextField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='account',
            name='imagenPerfil',
            field=models.ImageField(blank=True, default='images/default_user.png', null=True, upload_to='images/'),
        ),
        migrations.AddField(
            model_name='account',
            name='imagenPortada',
            field=models.ImageField(blank=True, default='images/default_portada.jpg', null=True, upload_to='images/'),
        ),
        migrations.AddField(
            model_name='account',
            name='last_name',
            field=models.CharField(blank=True, max_length=30, null=True),
        ),
        migrations.AddField(
            model_name='account',
            name='metaDiaria',
            field=models.BigIntegerField(null=True),
        ),
        migrations.AddField(
            model_name='palabra',
            name='DiasAAgregarSiCorrecto',
            field=models.PositiveIntegerField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='palabra',
            name='cantidadDeRepasosHastaElProximoNivel',
            field=models.BigIntegerField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='palabra',
            name='cantidadRepasos',
            field=models.PositiveIntegerField(blank=True, default=0, null=True),
        ),
        migrations.AddField(
            model_name='palabra',
            name='fechaHastaDescenderNivel',
            field=models.DateField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='palabra',
            name='fechaLeidaPrimeraVez',
            field=models.DateField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='palabra',
            name='fechaSiguienteRepaso',
            field=models.DateField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='palabra',
            name='fechaUltimoRepaso',
            field=models.DateField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='palabra',
            name='idioma',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='base.idioma'),
        ),
        migrations.AddField(
            model_name='palabra',
            name='palabra',
            field=models.CharField(blank=True, max_length=30, null=True),
        ),
        migrations.AddField(
            model_name='palabra',
            name='traduccion',
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
        migrations.AddField(
            model_name='palabra',
            name='usuario',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='palabra',
            name='dificultad',
            field=models.PositiveIntegerField(blank=True, default=0, null=True),
        ),
        migrations.AlterField(
            model_name='palabra',
            name='fechaCreacion',
            field=models.DateField(auto_now_add=True, null=True),
        ),
        migrations.AlterField(
            model_name='palabra',
            name='fechaModificacion',
            field=models.DateField(auto_now=True, null=True),
        ),
        migrations.CreateModel(
            name='Texto',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('fechaCreacion', models.DateField(auto_now_add=True, null=True)),
                ('fechaModificacion', models.DateField(auto_now=True, null=True)),
                ('cantidadPalabras', models.BigIntegerField(blank=True, null=True)),
                ('texto', models.TextField(blank=True, max_length=100000, null=True)),
                ('audio', models.FileField(blank=True, null=True, upload_to='')),
                ('youtubeURL', models.URLField(blank=True, null=True)),
                ('imagen', models.ImageField(blank=True, null=True, upload_to='')),
                ('completado', models.BooleanField(blank=True, default=False, null=True)),
                ('fechaUltimaLectura', models.DateField(blank=True, null=True)),
                ('fechaCompletado', models.DateField(blank=True, null=True)),
                ('categoria', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='base.categoria')),
                ('idioma', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='base.idioma')),
                ('usuario', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='textos', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Desafios',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(max_length=30, null=True)),
                ('cantidadPalabras', models.BigIntegerField(null=True)),
                ('cantidadPalabrasLeidas', models.BigIntegerField(null=True)),
                ('fechaFinalizacion', models.DateField(null=True)),
                ('fechaCreacion', models.DateField(auto_now_add=True, null=True)),
                ('imagen', models.ImageField(blank=True, null=True, upload_to='')),
                ('usuario', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.AddField(
            model_name='palabra',
            name='texto',
            field=models.ManyToManyField(to='base.Texto'),
        ),
    ]
42.100629
155
0.582313
631
6,694
6.0729
0.174326
0.083507
0.065762
0.119781
0.722077
0.697547
0.612735
0.585073
0.479645
0.456681
0
0.007951
0.286077
6,694
158
156
42.367089
0.79389
0.006722
0
0.598684
1
0
0.124868
0.023018
0
0
0
0
0
1
0
false
0
0.019737
0
0.039474
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
faa43f92ed4419078ca8c87c5823af07d1f539bd
1,711
py
Python
test/synth_mcmc.py
chris-stock/pyglm
fff2e91b0936275f37a7860bd75f867b560f4993
[ "MIT" ]
1
2017-03-11T19:10:17.000Z
2017-03-11T19:10:17.000Z
test/synth_mcmc.py
chris-stock/pyglm
fff2e91b0936275f37a7860bd75f867b560f4993
[ "MIT" ]
null
null
null
test/synth_mcmc.py
chris-stock/pyglm
fff2e91b0936275f37a7860bd75f867b560f4993
[ "MIT" ]
null
null
null
# Run as script using 'python -m test.synth'
import cPickle
import os
import scipy.io

from models.model_factory import *
from inference.gibbs import gibbs_sample
from utils.avg_dicts import average_list_of_dicts
from synth_harness import initialize_test_harness
from plotting.plot_results import plot_results

from population import Population


def run_synth_test():
    """ Run a test with synthetic data and MCMC inference
    """
    options, popn, data, popn_true, x_true = initialize_test_harness()

    # If x0 specified, load x0 from file
    x0 = None
    if options.x0_file is not None:
        with open(options.x0_file, 'r') as f:
            print "Initializing with state from: %s" % options.x0_file
            mle_x0 = cPickle.load(f)

        # HACK: We're assuming x0 came from a standard GLM
        mle_model = make_model('standard_glm', N=data['N'])
        mle_popn = Population(mle_model)
        mle_popn.set_data(data)

        x0 = popn.sample()
        x0 = convert_model(mle_popn, mle_model, mle_x0, popn, popn.model, x0)

    # Perform inference
    N_samples = 1000
    x_smpls = gibbs_sample(popn, data, x0=x0, N_samples=N_samples)

    # Save results
    results_file = os.path.join(options.resultsDir, 'results.pkl')
    print "Saving results to %s" % results_file
    with open(results_file, 'w') as f:
        cPickle.dump(x_smpls, f, protocol=-1)

    # Plot average of last 20% of samples
    smpl_frac = 0.2
    plot_results(popn,
                 x_smpls[-1*int(smpl_frac*N_samples):],
                 popn_true=popn_true,
                 x_true=x_true,
                 resdir=options.resultsDir)

if __name__ == "__main__":
    run_synth_test()
32.903846
81
0.661017
247
1,711
4.336032
0.384615
0.029879
0.02521
0.024276
0
0
0
0
0
0
0
0.01875
0.251899
1,711
51
82
33.54902
0.817969
0.1128
0
0
0
0
0.05931
0
0
0
0
0
0
0
null
null
0
0.257143
null
null
0.057143
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
faa4586ea3fbf6fea84324823f11f12d5b8ac80d
1,297
py
Python
pysh/transforms/autoimport.py
drslump/pysh
673cdf2b5ea95dc3209cb294bb91cb2f298bb888
[ "MIT" ]
3
2018-07-09T04:39:24.000Z
2020-11-27T05:44:56.000Z
pysh/transforms/autoimport.py
drslump/pysh
673cdf2b5ea95dc3209cb294bb91cb2f298bb888
[ "MIT" ]
null
null
null
pysh/transforms/autoimport.py
drslump/pysh
673cdf2b5ea95dc3209cb294bb91cb2f298bb888
[ "MIT" ]
1
2018-08-02T21:57:11.000Z
2018-08-02T21:57:11.000Z
""" Every name reference is swapped for a call to ``__autoimport__``, which will check if it's part of the locals or globals, falling back to trying an import before giving up. """ from importlib import import_module from ast import NodeTransformer, copy_location, fix_missing_locations, \ AST, Call, Name, Load, Str, keyword from typing import Any, Union, Dict __all__ = ['__autoimport__'] class AutoImportTransformer(NodeTransformer): def visit_Name(self, node: Name) -> Union[Name, Call]: if not isinstance(node.ctx, Load): return node delegate = Call( func=Name(id='__autoimport__', ctx=Load()), args=[ Str(s=node.id) ], keywords=[]) copy_location(delegate, node) fix_missing_locations(delegate) return delegate def __autoimport__(name: str) -> Any: import inspect f_back = inspect.currentframe().f_back #type: ignore if name in f_back.f_locals: return f_back.f_locals[name] if name in f_back.f_globals: return f_back.f_globals[name] try: return import_module(name) except ImportError: pass raise NameError(name) def parser(node: AST) -> AST: return AutoImportTransformer().visit(node)
23.160714
72
0.652274
164
1,297
4.920732
0.463415
0.037175
0.02974
0.022305
0.034696
0.034696
0
0
0
0
0
0
0.256746
1,297
55
73
23.581818
0.837137
0.142637
0
0
0
0
0.025362
0
0
0
0
0
0
1
0.09375
false
0.03125
0.34375
0.03125
0.65625
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
faa609968ff5a4b42dcd92861e9a5c5ca3e91929
3,103
py
Python
apps/site/api/views/abstract_views.py
LocalGround/localground
aa5a956afe7a84a7763a3b23d62a9fd925831cd7
[ "Apache-2.0" ]
9
2015-05-29T22:22:20.000Z
2022-02-01T20:39:00.000Z
apps/site/api/views/abstract_views.py
LocalGround/localground
aa5a956afe7a84a7763a3b23d62a9fd925831cd7
[ "Apache-2.0" ]
143
2015-01-22T15:03:40.000Z
2020-06-27T01:55:29.000Z
apps/site/api/views/abstract_views.py
LocalGround/localground
aa5a956afe7a84a7763a3b23d62a9fd925831cd7
[ "Apache-2.0" ]
5
2015-03-16T20:51:49.000Z
2017-02-07T20:48:49.000Z
from localground.apps.lib.helpers import get_timestamp_no_milliseconds
from localground.apps.site.api import filters
from localground.apps.site import models
from rest_framework import generics, status, exceptions
from localground.apps.site.api.serializers.user_profile_serializer import \
    UserProfileSerializer
from django.core.exceptions import ValidationError
from rest_framework.exceptions import APIException
from localground.apps.site.api.permissions import \
    CheckProjectPermissions, CheckUserCanPostToProject


class QueryableListCreateAPIView(generics.ListCreateAPIView):

    def metadata(self, request):
        # extend the existing metadata method in the parent class by adding a
        # list of available filters
        from localground.apps.lib.helpers import QueryParser
        from django.utils.datastructures import SortedDict
        ret = super(QueryableListCreateAPIView, self).metadata(request)
        ret = SortedDict(ret)
        try:
            query = QueryParser(self.model, request.GET.get('query'))
            ret['filters'] = query.to_dict_list()
        except Exception:
            pass
        return ret


class QueryableListAPIView(generics.ListAPIView):

    def metadata(self, request):
        # extend the existing metadata method in the parent class by adding a
        # list of available filters
        from localground.apps.lib.helpers import QueryParser
        from django.utils.datastructures import SortedDict
        ret = super(QueryableListAPIView, self).metadata(request)
        ret = SortedDict(ret)
        try:
            query = QueryParser(self.model, request.GET.get('query'))
            ret['filters'] = query.to_dict_list()
        except Exception:
            pass
        return ret


class QueryableRetrieveUpdateDestroyView(
        generics.RetrieveUpdateDestroyAPIView):

    def metadata(self, request):
        # extend the existing metadata method in the parent class by adding a
        # list of available filters
        from localground.apps.lib.helpers import QueryParser
        from django.utils.datastructures import SortedDict
        # super() must name this class; naming a sibling class here would
        # raise a TypeError at runtime
        ret = super(QueryableRetrieveUpdateDestroyView, self).metadata(request)
        ret = SortedDict(ret)
        try:
            query = QueryParser(self.model, request.GET.get('query'))
            ret['filters'] = query.to_dict_list()
        except Exception:
            pass
        return ret


class MediaList(QueryableListCreateAPIView):
    filter_backends = (filters.SQLFilterBackend,
                       filters.RequiredProjectFilter)
    permission_classes = (CheckProjectPermissions, CheckUserCanPostToProject)
    ext_whitelist = []

    def get_queryset(self):
        if self.request.user.is_authenticated():
            return self.model.objects.get_objects(self.request.user)
        else:
            return self.model.objects.get_objects_public(
                access_key=self.request.GET.get('access_key')
            )


class MediaInstance(generics.RetrieveUpdateDestroyAPIView):

    def get_queryset(self):
        return self.model.objects.select_related('owner').all()
36.081395
79
0.705769
326
3,103
6.641104
0.282209
0.055427
0.070208
0.040647
0.574134
0.538106
0.492379
0.492379
0.492379
0.492379
0
0
0.221399
3,103
85
80
36.505882
0.896109
0.090558
0
0.557377
0
0
0.018117
0
0
0
0
0
0
1
0.081967
false
0.04918
0.229508
0.016393
0.540984
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
faa7367cd6852d4396d5b354611f83726f61a9ab
690
py
Python
users/migrations/0017_auto_20190828_1311.py
dhanupandey12/Blog
fcd274b7249c255786b46cf81d6e949a903e9a53
[ "MIT" ]
null
null
null
users/migrations/0017_auto_20190828_1311.py
dhanupandey12/Blog
fcd274b7249c255786b46cf81d6e949a903e9a53
[ "MIT" ]
null
null
null
users/migrations/0017_auto_20190828_1311.py
dhanupandey12/Blog
fcd274b7249c255786b46cf81d6e949a903e9a53
[ "MIT" ]
null
null
null
# Generated by Django 2.1.5 on 2019-08-28 07:41

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('users', '0016_friends'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='friends',
            options={'verbose_name': 'Friend List', 'verbose_name_plural': 'Friend List'},
        ),
        migrations.AlterModelOptions(
            name='messagedata',
            options={'verbose_name': 'Messages', 'verbose_name_plural': 'Messages'},
        ),
        migrations.RenameField(
            model_name='friends',
            old_name='friends',
            new_name='friendList',
        ),
    ]
25.555556
90
0.575362
63
690
6.142857
0.587302
0.113695
0.160207
0
0
0
0
0
0
0
0
0.039256
0.298551
690
26
91
26.538462
0.760331
0.065217
0
0.25
1
0
0.247278
0
0
0
0
0
0
1
0
false
0
0.05
0
0.2
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
faadc11e3f24c6373636e3bb5814da8ad0ad2c17
14,140
py
Python
anima/env/nukeEnv.py
tws0002/anima
73c256d1f7716a2db7933d6d8519a51333c7e5b4
[ "BSD-2-Clause" ]
null
null
null
anima/env/nukeEnv.py
tws0002/anima
73c256d1f7716a2db7933d6d8519a51333c7e5b4
[ "BSD-2-Clause" ]
null
null
null
anima/env/nukeEnv.py
tws0002/anima
73c256d1f7716a2db7933d6d8519a51333c7e5b4
[ "BSD-2-Clause" ]
null
null
null
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2018, Anima Istanbul
#
# This module is part of anima-tools and is released under the BSD 2
# License: http://www.opensource.org/licenses/BSD-2-Clause

import os
import nuke
from nukescripts import *

from anima.env import empty_reference_resolution
from anima.env.base import EnvironmentBase


class Nuke(EnvironmentBase):
    """the nuke environment class
    """

    name = "Nuke"
    extensions = ['.nk']

    def __init__(self, name='', version=None):
        """nuke specific init
        """
        super(Nuke, self).__init__(name=name, version=version)
        # and add you own modifications to __init__
        # get the root node
        self._root = self.get_root_node()

        self._main_output_node_name = "MAIN_OUTPUT"

    def get_root_node(self):
        """returns the root node of the current nuke session
        """
        return nuke.toNode("root")

    def save_as(self, version, run_pre_publishers=True):
        """the save action for nuke environment

        uses Nukes own python binding
        """
        # get the current version, and store it as the parent of the new version
        current_version = self.get_current_version()

        # first initialize the version path
        version.update_paths()

        # set the extension to '.nk'
        version.extension = self.extensions[0]

        # set created_with to let the UI show Nuke icon in versions list
        version.created_with = self.name

        # set project_directory
        # self.project_directory = os.path.dirname(version.absolute_path)

        # create the main write node
        self.create_main_write_node(version)

        # replace read and write node paths
        # self.replace_external_paths()

        # create the path before saving
        try:
            os.makedirs(version.absolute_path)
        except OSError:
            # path already exists OSError
            pass

        # set frame range
        # if this is a shot related task set it to shots resolution
        is_shot_related_task = False
        shot = None
        from stalker import Shot
        for task in version.task.parents:
            if isinstance(task, Shot):
                is_shot_related_task = True
                shot = task
                break

        # set scene fps
        project = version.task.project
        self.set_fps(project.fps)

        if version.version_number == 1:
            if is_shot_related_task:
                # just set if the frame range is not 1-1
                if shot.cut_in != 1 and shot.cut_out != 1:
                    self.set_frame_range(
                        shot.cut_in,
                        shot.cut_out
                    )
                imf = shot.image_format
            else:
                imf = project.image_format

            # TODO: set the render resolution later
            # self.set_resolution(
            #     imf.width,
            #     imf.height,
            #     imf.pixel_aspect
            # )

        nuke.scriptSaveAs(version.absolute_full_path)

        if current_version:
            # update the parent info
            version.parent = current_version

            # update database with new version info
            from stalker.db.session import DBSession
            DBSession.commit()

        return True

    def export_as(self, version):
        """the export action for nuke environment
        """
        # set the extension to '.nk'
        version.update_paths()
        version.extension = self.extensions[0]
        nuke.nodeCopy(version.absolute_full_path)
        return True

    def open(self, version, force=False, representation=None,
             reference_depth=0, skip_update_check=False):
        """the open action for nuke environment
        """
        nuke.scriptOpen(version.absolute_full_path)

        # set the project_directory
        # self.project_directory = os.path.dirname(version.absolute_path)

        # TODO: file paths in different OS'es should be replaced with the current one
        # Check if the file paths are starting with a string matching one of the
        # OS'es project_directory path and replace them with a relative one
        # matching the current OS

        # replace paths
        # self.replace_external_paths()

        # return True to specify everything was ok and an empty list
        # for the versions those needs to be updated
        return empty_reference_resolution()

    def import_(self, version, use_namespace=True):
        """the import action for nuke environment
        """
        nuke.nodePaste(version.absolute_full_path)
        return True

    def get_current_version(self):
        """Finds the Version instance from the current open file.

        If it can't find any then returns None.

        :return: :class:`~oyProjectManager.models.version.Version`
        """
        full_path = self._root.knob('name').value()
        return self.get_version_from_full_path(full_path)

    def get_version_from_recent_files(self):
        """It will try to create a
        :class:`~oyProjectManager.models.version.Version` instance by looking
        at the recent files list.

        It will return None if it can not find one.

        :return: :class:`~oyProjectManager.models.version.Version`
        """
        # use the last file from the recent file list
        i = 1
        while True:
            try:
                full_path = nuke.recentFile(i)
            except RuntimeError:
                # no recent file anymore just return None
                return None
            i += 1

            version = self.get_version_from_full_path(full_path)
            if version is not None:
                return version

    def get_version_from_project_dir(self):
        """Tries to find a Version from the current project directory

        :return: :class:`~oyProjectManager.models.version.Version`
        """
        versions = self.get_versions_from_path(self.project_directory)
        version = None

        if versions:
            version = versions[0]

        return version

    def get_last_version(self):
        """gets the file name from nuke
        """
        version = self.get_current_version()

        # read the recent file list
        if version is None:
            version = self.get_version_from_recent_files()

        # get the latest possible Version instance by using the workspace path
        if version is None:
            version = self.get_version_from_project_dir()

        return version

    def get_frame_range(self):
        """returns the current frame range
        """
        #self._root = self.get_root_node()
        startFrame = int(self._root.knob('first_frame').value())
        endFrame = int(self._root.knob('last_frame').value())
        return startFrame, endFrame

    def set_frame_range(self, start_frame=1, end_frame=100,
                        adjust_frame_range=False):
        """sets the start and end frame range
        """
        self._root.knob('first_frame').setValue(start_frame)
        self._root.knob('last_frame').setValue(end_frame)

    def set_fps(self, fps=25):
        """sets the current fps
        """
        self._root.knob('fps').setValue(fps)

    def get_fps(self):
        """returns the current fps
        """
        return int(self._root.knob('fps').getValue())

    def set_resolution(self, width, height, pixel_aspect=1.0):
        """Sets the resolution of the current scene

        :param width: The width of the output image
        :param height: The height of the output image
        :param pixel_aspect: The pixel aspect ratio
        """
        # TODO: set resolution later
        pass

    def get_main_write_nodes(self):
        """Returns the main write node in the scene or None.
        """
        # list all the write nodes in the current file
        all_main_write_nodes = []
        for write_node in nuke.allNodes("Write"):
            if write_node.name().startswith(self._main_output_node_name):
                all_main_write_nodes.append(write_node)
        return all_main_write_nodes

    def create_main_write_node(self, version):
        """creates the default write node if there is no one created before.
        """
        # list all the write nodes in the current file
        main_write_nodes = self.get_main_write_nodes()

        # check if there is a write node or not
        if not len(main_write_nodes):
            # create one with correct output path
            main_write_node = nuke.nodes.Write()
            main_write_node.setName(self._main_output_node_name)
            main_write_nodes.append(main_write_node)

        for main_write_node in main_write_nodes:
            # set the output path
            output_file_name = ""
            output_file_name = version.task.project.code + "_"

            # get the output format
            output_format_enum = \
                main_write_node.knob('file_type').value().strip()
            if output_format_enum == '':
                # set it to png by default
                output_format_enum = 'png'
                main_write_node.knob('file_type').setValue(output_format_enum)
            elif output_format_enum == 'ffmpeg':
                output_format_enum = 'mov'
            elif output_format_enum == 'targa':
                output_format_enum = 'tga'

            output_file_name += '%s_v%03d' % (
                version.nice_name, version.version_number
            )

            if output_format_enum != 'mov':
                output_file_name += ".####." + output_format_enum
            else:
                output_file_name += '.' + output_format_enum

            # check if it is a stereo comp
            # if it is enable separate view rendering

            # set the output path
            output_file_full_path = os.path.join(
                version.absolute_path,
                'Outputs',
                version.take_name,
                'v%03d' % version.version_number,
                output_format_enum,
                output_file_name
            ).replace("\\", "/")

            # create the path
            try:
                os.makedirs(
                    os.path.dirname(
                        output_file_full_path
                    )
                )
            except OSError:
                # path already exists
                pass

            # set the output file path
            main_write_node.knob("file").setValue(output_file_full_path)

    def replace_external_paths(self, mode=0):
        """make paths relative to the project dir
        """
        # TODO: replace file paths if project_directory changes
        # check if the project_directory is still the same
        # if it is do the regular replacement
        # but if it is not then expand all the paths to absolute paths

        # convert the given path to tcl environment script
        from anima import utils

        def rep_path(path):
            return utils.relpath(self.project_directory, path, "/", "..")

        # get all read nodes
        allNodes = nuke.allNodes()

        readNodes = [node for node in allNodes if node.Class() == "Read"]
        writeNodes = [node for node in allNodes if node.Class() == "Write"]
        readGeoNodes = [node for node in allNodes if node.Class() == "ReadGeo"]
        readGeo2Nodes = [node for node in allNodes if node.Class() == "ReadGeo2"]
        writeGeoNodes = [node for node in allNodes if node.Class() == "WriteGeo"]

        def nodeRep(nodes):
            """helper function to replace path values
            """
            [node["file"].setValue(
                rep_path(
                    os.path.expandvars(
                        os.path.expanduser(
                            node["file"].getValue()
                        )
                    ).replace('\\', '/')
                )
            ) for node in nodes]

        nodeRep(readNodes)
        nodeRep(writeNodes)
        nodeRep(readGeoNodes)
        nodeRep(readGeo2Nodes)
        nodeRep(writeGeoNodes)

    @property
    def project_directory(self):
        """The project directory.

        Set it to the project root, and set all your paths relative to this
        directory.
        """
        root = self.get_root_node()

        # TODO: root node gets lost, fix it
        # there is a bug in Nuke, the root node get lost time to time find
        # the source and fix it.
        # if root is None:
        #     # there is a bug about Nuke,
        #     # sometimes it losses the root node, while it shouldn't
        #     # I can't find the source
        #     # so instead of using the root node,
        #     # just return the os.path.dirname(version.path)
        #     return os.path.dirname(self.version.path)

        return root["project_directory"].getValue()

    @project_directory.setter
    def project_directory(self, project_directory_in):

        project_directory_in = project_directory_in.replace("\\", "/")

        root = self.get_root_node()
        root["project_directory"].setValue(project_directory_in)

    def create_slate_info(self):
        """Returns info about the current shot which will contribute to the
        shot slate

        :return: string
        """
        version = self.get_current_version()
        shot = version.task

        # create a jinja2 template
        import jinja2
        template = jinja2.Template("""Project: {{shot.project.name}}
Shot: {{shot.name}}
Frame Range: {{shot.cut_in}}-{{shot.cut_out}}
Handles: +{{shot.handle_at_start}}, -{{shot.handle_at_end}}
Artist: {% for resource in shot.resources %}{{resource.name}}{%- if loop.index != 1%}, {% endif -%}{% endfor %}
Version: v{{'%03d'|format(version.version_number)}}
Status: {{version.task.status.name}}
""")

        template_vars = {
            "shot": shot,
            "version": version
        }

        return template.render(**template_vars)
33.037383
111
0.585007
1,678
14,140
4.752682
0.196067
0.040125
0.024075
0.00815
0.228339
0.153103
0.081003
0.071975
0.036113
0.017053
0
0.004659
0.332108
14,140
427
112
33.114754
0.839792
0.315205
0
0.127451
0
0.004902
0.066079
0.020845
0
0
0
0.01171
0
1
0.112745
false
0.014706
0.04902
0.004902
0.254902
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
faaed7d726fb97374e33fe05e85c8cca35b6cc7d
2,015
py
Python
web/certificates/migrations/0011_doublecoutingregistration_doublecoutingregistrationinputoutput.py
MTES-MCT/biocarburants
ff084916e18cdbdc41400f36fa6cc76a5e05900e
[ "MIT" ]
4
2020-03-22T18:13:12.000Z
2021-01-25T10:33:31.000Z
web/certificates/migrations/0011_doublecoutingregistration_doublecoutingregistrationinputoutput.py
MTES-MCT/carbure
2876756b760ab4866fa783bb40e61a046eebb1ab
[ "MIT" ]
20
2020-07-06T14:33:14.000Z
2022-03-15T16:54:17.000Z
web/certificates/migrations/0011_doublecoutingregistration_doublecoutingregistrationinputoutput.py
MTES-MCT/biocarburants
ff084916e18cdbdc41400f36fa6cc76a5e05900e
[ "MIT" ]
4
2020-04-03T12:19:12.000Z
2021-06-15T12:20:57.000Z
# Generated by Django 3.2.4 on 2021-07-22 09:40

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0175_lotv2_lots_v2_year_87d135_idx'),
        ('certificates', '0010_auto_20210509_1038'),
    ]

    operations = [
        migrations.CreateModel(
            name='DoubleCoutingRegistration',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('certificate_id', models.CharField(max_length=64)),
                ('certificate_holder', models.CharField(max_length=256)),
                ('registered_address', models.TextField()),
                ('valid_from', models.DateField()),
                ('valid_until', models.DateField()),
            ],
            options={
                'verbose_name': 'Certificat Double Compte',
                'verbose_name_plural': 'Certificats Double Compte',
                'db_table': 'double_counting_registrations',
            },
        ),
        migrations.CreateModel(
            name='DoubleCoutingRegistrationInputOutput',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('biofuel', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.biocarburant')),
                ('certificate', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='certificates.doublecoutingregistration')),
                ('feedstock', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.matierepremiere')),
            ],
            options={
                'verbose_name': 'Périmètre Certificat Double Compte',
                'verbose_name_plural': 'Périmètres Certificats Double Compte',
                'db_table': 'double_counting_registrations_scope',
            },
        ),
    ]
43.804348
144
0.60794
186
2,015
6.38172
0.451613
0.055602
0.047178
0.074136
0.432182
0.432182
0.36647
0.36647
0.27043
0.230834
0
0.031886
0.268486
2,015
45
145
44.777778
0.773406
0.022333
0
0.307692
1
0
0.292683
0.111789
0
0
0
0
0
1
0
false
0
0.051282
0
0.128205
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fab1ae1fd23f251523f0e24d567662098132f1c5
232
py
Python
edx/problem_set_1/bob.py
spradeepv/dive-into-python
ec27d4686b7b007d21f9ba4f85d042be31ee2639
[ "MIT" ]
null
null
null
edx/problem_set_1/bob.py
spradeepv/dive-into-python
ec27d4686b7b007d21f9ba4f85d042be31ee2639
[ "MIT" ]
null
null
null
edx/problem_set_1/bob.py
spradeepv/dive-into-python
ec27d4686b7b007d21f9ba4f85d042be31ee2639
[ "MIT" ]
null
null
null
s = raw_input()
find = 'bob'
count = 0
index = 0
while index < len(s):
    index = s.find(find, index)
    if index == -1:
        break
    index += 2
    count += 1
print "Number of times bob occurs is:", count
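The snippet above is Python 2 (raw_input, print statement). A hedged Python 3 rendering of the same overlapping-substring count, not part of the original file:

# Sketch only: Python 3 equivalent of the overlapping "bob" counter.
s = input()
find = 'bob'
count = 0
index = s.find(find)
while index != -1:
    count += 1
    index = s.find(find, index + 1)  # advance by 1 so overlaps are counted
print("Number of times bob occurs is:", count)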
21.090909
45
0.521552
34
232
3.529412
0.588235
0
0
0
0
0
0
0
0
0
0
0.033333
0.353448
232
11
45
21.090909
0.766667
0
0
0
0
0
0.141631
0
0
0
0
0
0
0
null
null
0
0
null
null
0.090909
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
fab458a70a67ab83469b992d3cb10888dc4cb5fd
6,321
py
Python
.qt_for_python/uic/installer.py
thomascswalker/bettergameexporter
4db3683a599d523e28c2f93bdcac889277130153
[ "MIT" ]
null
null
null
.qt_for_python/uic/installer.py
thomascswalker/bettergameexporter
4db3683a599d523e28c2f93bdcac889277130153
[ "MIT" ]
null
null
null
.qt_for_python/uic/installer.py
thomascswalker/bettergameexporter
4db3683a599d523e28c2f93bdcac889277130153
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-

################################################################################
## Form generated from reading UI file 'installer.ui'
##
## Created by: Qt User Interface Compiler version 5.15.1
##
## WARNING! All changes made in this file will be lost when recompiling UI file!
################################################################################

from PySide2.QtCore import *
from PySide2.QtGui import *
from PySide2.QtWidgets import *


class Ui_MainWindow(object):
    def setupUi(self, MainWindow):
        if not MainWindow.objectName():
            MainWindow.setObjectName(u"MainWindow")
        MainWindow.resize(550, 320)
        sizePolicy = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
        MainWindow.setSizePolicy(sizePolicy)
        MainWindow.setMinimumSize(QSize(550, 320))
        MainWindow.setMaximumSize(QSize(550, 320))
        self.centralwidget = QWidget(MainWindow)
        self.centralwidget.setObjectName(u"centralwidget")
        self.gridLayout = QGridLayout(self.centralwidget)
        self.gridLayout.setObjectName(u"gridLayout")
        self.gridLayout.setContentsMargins(20, 20, 20, 20)
        self.horizontalLayout = QHBoxLayout()
        self.horizontalLayout.setObjectName(u"horizontalLayout")
        self.horizontalLayout.setContentsMargins(-1, 20, -1, -1)
        self.maxVersionList = QComboBox(self.centralwidget)
        self.maxVersionList.setObjectName(u"maxVersionList")
        self.horizontalLayout.addWidget(self.maxVersionList)
        self.maxVersionExplore = QToolButton(self.centralwidget)
        self.maxVersionExplore.setObjectName(u"maxVersionExplore")
        self.horizontalLayout.addWidget(self.maxVersionExplore)
        self.gridLayout.addLayout(self.horizontalLayout, 1, 0, 1, 1)
        self.horizontalLayout_2 = QHBoxLayout()
        self.horizontalLayout_2.setObjectName(u"horizontalLayout_2")
        self.horizontalLayout_2.setContentsMargins(-1, 20, -1, -1)
        self.horizontalSpacer = QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)
        self.horizontalLayout_2.addItem(self.horizontalSpacer)
        self.uninstall = QPushButton(self.centralwidget)
        self.uninstall.setObjectName(u"uninstall")
        self.uninstall.setEnabled(False)
        sizePolicy.setHeightForWidth(self.uninstall.sizePolicy().hasHeightForWidth())
        self.uninstall.setSizePolicy(sizePolicy)
        self.uninstall.setMinimumSize(QSize(120, 32))
        self.horizontalLayout_2.addWidget(self.uninstall)
        self.install = QPushButton(self.centralwidget)
        self.install.setObjectName(u"install")
        sizePolicy.setHeightForWidth(self.install.sizePolicy().hasHeightForWidth())
        self.install.setSizePolicy(sizePolicy)
        self.install.setMinimumSize(QSize(120, 32))
        self.horizontalLayout_2.addWidget(self.install)
        self.gridLayout.addLayout(self.horizontalLayout_2, 3, 0, 1, 1)
        self.horizontalLayout_3 = QHBoxLayout()
        self.horizontalLayout_3.setObjectName(u"horizontalLayout_3")
        self.verticalLayout = QVBoxLayout()
        self.verticalLayout.setObjectName(u"verticalLayout")
        self.verticalLayout.setContentsMargins(0, -1, -1, -1)
        self.label = QLabel(self.centralwidget)
        self.label.setObjectName(u"label")
        sizePolicy1 = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed)
        sizePolicy1.setHorizontalStretch(0)
        sizePolicy1.setVerticalStretch(0)
        sizePolicy1.setHeightForWidth(self.label.sizePolicy().hasHeightForWidth())
        self.label.setSizePolicy(sizePolicy1)
        self.label.setTextFormat(Qt.RichText)
        self.label.setWordWrap(True)
        self.verticalLayout.addWidget(self.label)
        self.label_3 = QLabel(self.centralwidget)
        self.label_3.setObjectName(u"label_3")
        self.label_3.setWordWrap(True)
        self.verticalLayout.addWidget(self.label_3)
        self.horizontalLayout_3.addLayout(self.verticalLayout)
        self.horizontalSpacer_2 = QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)
        self.horizontalLayout_3.addItem(self.horizontalSpacer_2)
        self.gridLayout.addLayout(self.horizontalLayout_3, 0, 0, 1, 1)
        self.horizontalLayout_4 = QHBoxLayout()
        self.horizontalLayout_4.setObjectName(u"horizontalLayout_4")
        self.label_2 = QLabel(self.centralwidget)
        self.label_2.setObjectName(u"label_2")
        self.label_2.setTextFormat(Qt.AutoText)
        self.horizontalLayout_4.addWidget(self.label_2)
        self.installPath = QLabel(self.centralwidget)
        self.installPath.setObjectName(u"installPath")
        self.installPath.setTextFormat(Qt.RichText)
        self.installPath.setWordWrap(False)
        self.horizontalLayout_4.addWidget(self.installPath)
        self.horizontalSpacer_3 = QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)
        self.horizontalLayout_4.addItem(self.horizontalSpacer_3)
        self.gridLayout.addLayout(self.horizontalLayout_4, 2, 0, 1, 1)
        MainWindow.setCentralWidget(self.centralwidget)

        self.retranslateUi(MainWindow)

        QMetaObject.connectSlotsByName(MainWindow)
    # setupUi

    def retranslateUi(self, MainWindow):
        MainWindow.setWindowTitle(QCoreApplication.translate("MainWindow", u"MainWindow", None))
        self.maxVersionExplore.setText(QCoreApplication.translate("MainWindow", u"Open", None))
        self.uninstall.setText(QCoreApplication.translate("MainWindow", u"Uninstall", None))
        self.install.setText(QCoreApplication.translate("MainWindow", u"Install", None))
        self.label.setText(QCoreApplication.translate("MainWindow", u"<html><head/><body><p><span style=\" font-size:16pt;\">Better Max Tools</span></p></body></html>", None))
        self.label_3.setText(QCoreApplication.translate("MainWindow", u"This will install the better-max-tools package into an environment accessible by 3ds Max.", None))
        self.label_2.setText(QCoreApplication.translate("MainWindow", u"Install location:", None))
        self.installPath.setText("")
    # retranslateUi
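A short sketch of how a uic-generated Ui_ class like this is typically wired to a window; it assumes PySide2 is installed and is not part of the generated file itself:

# Sketch only: standard setupUi usage for a generated PySide2 form.
import sys
from PySide2.QtWidgets import QApplication, QMainWindow

app = QApplication(sys.argv)
window = QMainWindow()
ui = Ui_MainWindow()
ui.setupUi(window)    # builds the widget tree onto the window
window.show()
sys.exit(app.exec_())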
42.422819
175
0.701946
620
6,321
7.095161
0.216129
0.109116
0.047738
0.057286
0.265515
0.134349
0.099341
0.076154
0.076154
0
0
0.025709
0.169277
6,321
148
176
42.709459
0.812036
0.035754
0
0
1
0
0.080574
0.009628
0
0
0
0
0
1
0.020408
false
0
0.030612
0
0.061224
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fab4d1310e9ef911cecc3613e34d95dcfd6c0157
404
py
Python
paginas/migrations/0014_auto_20190206_2343.py
igor-pontes/Dolex
4d65a288fbf6cdf44628994fea3c821e8af2ea61
[ "MIT" ]
null
null
null
paginas/migrations/0014_auto_20190206_2343.py
igor-pontes/Dolex
4d65a288fbf6cdf44628994fea3c821e8af2ea61
[ "MIT" ]
4
2020-07-14T13:02:48.000Z
2021-06-10T20:27:09.000Z
paginas/migrations/0014_auto_20190206_2343.py
igor-pontes/Dolex
4d65a288fbf6cdf44628994fea3c821e8af2ea61
[ "MIT" ]
null
null
null
# Generated by Django 2.1.5 on 2019-02-06 23:43

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('paginas', '0013_auto_20190206_1739'),
    ]

    operations = [
        migrations.AlterField(
            model_name='players_lobby',
            name='slot',
            field=models.CharField(default=None, max_length=2),
        ),
    ]
21.263158
63
0.608911
45
404
5.333333
0.844444
0
0
0
0
0
0
0
0
0
0
0.109589
0.277228
404
18
64
22.444444
0.712329
0.111386
0
0
1
0
0.131653
0.064426
0
0
0
0
0
1
0
false
0
0.083333
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
fab71b81531484d1696ade3aa93eda9a02495ebe
4,028
py
Python
design_patterns/factory_andri.py
andricampagnaro/documentacoes_e_testes
a12b1348dbb43ad72fe56a6287e228d6c031e36f
[ "MIT" ]
null
null
null
design_patterns/factory_andri.py
andricampagnaro/documentacoes_e_testes
a12b1348dbb43ad72fe56a6287e228d6c031e36f
[ "MIT" ]
4
2021-06-08T21:55:17.000Z
2022-01-13T02:57:49.000Z
design_patterns/factory_andri.py
andricampagnaro/documentacao_python3
a12b1348dbb43ad72fe56a6287e228d6c031e36f
[ "MIT" ]
null
null
null
# import logging
# logging.basicConfig(filename='example.log', level=logging.DEBUG)
# logging.debug('This message should go to the log file')
# logging.info('So should this')
# logging.warning('And this, too ã')


class ValidaSmart():
    def __init__(self):
        self._carrega_modulos_externos()

    def _carrega_modulos_externos(self):
        self.layout_pessoas = LayoutPessoas()
        self.layout_pessoas_enderecos = LayoutPessoasEnderecos()
        self.layout_produtos = LayoutProdutos()

    def executa(self):
        print('[ValidaSmart] Executando...')
        self.layout_pessoas.executa()
        self.layout_pessoas_enderecos.executa()
        self.layout_produtos.executa()
        print('[ValidaSmart] Executado!')


########################################################

class LayoutPessoas():
    def __init__(self):
        self._carrega_modulos_externos()

    def _carrega_modulos_externos(self):
        pass

    def executa(self):
        print('[LayoutPessoas] Executando...')
        print('[LayoutPessoas] Executado!')


class LayoutPessoasEnderecos():
    def __init__(self):
        self._carrega_modulos_externos()

    def _carrega_modulos_externos(self):
        pass

    def executa(self):
        print('[LayoutPessoasEnderecos] Executando...')
        print('[LayoutPessoasEnderecos] Executado!')


class LayoutProdutos():
    def __init__(self):
        self._carrega_modulos_externos()

    def _carrega_modulos_externos(self):
        self.campo_ncm = CampoNCM([1, 2, 3, 4, 5])

    def executa(self):
        print('[LayoutProdutos] Executando...')
        self.campo_ncm.confere_dados()
        print('[LayoutProdutos] Executado!')


########################################################

def busca_ncm(ncm):
    print(f'[Funcao Busca NCM] Buscando NCM {ncm}')
    if ncm == 1:
        print(f'[Funcao Busca NCM] NCM {ncm} é válido')
    else:
        print(f'[Funcao Busca NCM] NCM {ncm} é inválido')


########################################################

class CampoTipoString():
    def __init__(self, lista_registros):
        self.lista_registros = lista_registros
        self._quantidade_registros = len(self.lista_registros)
        self._nome_classe = self.__class__.__name__

    @property
    def tipo(self):
        return 'string'

    def confere_dados(self):
        print(f'[{self._nome_classe}] Iniciando a conferencia padrão...')
        self._verifica_tipo_campo()
        self._verifica_tamanho_campo()
        self._valida_registros_em_branco()
        print(f'[{self._nome_classe}] Conferência padrão concluída.')

    def _verifica_tipo_campo(self):
        print(f'[{self._nome_classe}] Verificando tipo do campo...')
        print(f'[{self._nome_classe}] Tipo do campo verificado.')

    def _verifica_tamanho_campo(self):
        print(f'[{self._nome_classe}] Verificando tamanho do campo...')
        print(f'[{self._nome_classe}] Tamanho do campo verificado.')

    def _valida_registros_em_branco(self):
        print(f'[{self._nome_classe}] Validando caracteres em branco...')
        print(f'[{self._nome_classe}] Validação de caracteres em branco concluída.')


########################################################

class CampoNCM(CampoTipoString):
    def confere_dados(self):
        super().confere_dados()
        print(f'[{self._nome_classe}] Iniciando a conferencias específicas do campo...')
        self._valida_caracteres_permitidos()
        print(f'[{self._nome_classe}] Conferencias específicas do campo concluídas...')

    def busca_ncms(self):
        print('[CampoNCM] Buscando NCMs...')
        for ncm in self.lista_registros:
            busca_ncm(ncm)
        print('[CampoNCM] Busca de NCMs concluída.')

    def _valida_caracteres_permitidos(self):
        print(f'[{self._nome_classe}] Validando caracteres...')
        print(f'[{self._nome_classe}] Caracteres validados.')


if __name__ == '__main__':
    valida_smart = ValidaSmart()
    valida_smart.executa()
    print(valida_smart.layout_produtos.campo_ncm.tipo)
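A brief hedged check of the field classes on their own, outside the ValidaSmart pipeline above; CampoNCM.confere_dados() illustrates the template-method shape of the design, running the base-class checks via super() before its own. The printed messages are in Portuguese ("Executando" = running, "concluída" = finished):

# Sketch only: exercises the classes defined above directly.
campo = CampoNCM([1, 2])
campo.confere_dados()   # base checks via super(), then NCM-specific checks
campo.busca_ncms()      # looks up each NCM through busca_ncm()
print(campo.tipo)       # -> 'string'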
33.566667
88
0.635055
430
4,028
5.62093
0.234884
0.037236
0.0753
0.069508
0.319404
0.29458
0.292925
0.22259
0.131568
0.131568
0
0.001851
0.195382
4,028
120
89
33.566667
0.743906
0.049652
0
0.235294
0
0
0.299806
0.083356
0
0
0
0
0
1
0.258824
false
0.023529
0
0.011765
0.341176
0.305882
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1