prefix
stringlengths
0
918k
middle
stringlengths
0
812k
suffix
stringlengths
0
962k
# (Apache License header truncated in this fragment.)
#
# Glance Release Notes documentation build configuration file, created by
# sphinx-quickstart on Tue Nov 3 17:40:50 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# All configuration values have a default; values that are commented out
# in the sphinx-quickstart template serve to show the default.

# -- General configuration ------------------------------------------------

# Sphinx extension module names, as strings. They can be extensions coming
# with Sphinx (named 'sphinx.ext.*') or custom ones.
extensions = [
    'oslosphinx',
    'reno.sphinxext',
]

# Paths that contain templates, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
# NOTE(review): the project is named "cellar" but every output artifact below
# (htmlhelp_basename, latex/man/texinfo documents) still says "Glance" —
# this file looks copied from Glance; confirm whether the names should be
# updated to cellar.
project = u'cellar Release Notes'
copyright = u'2016, OpenStack Foundation'

# The version info for the project you're documenting, acts as replacement
# for |version| and |release|.
# The full version, including alpha/beta/rc tags.
release = ''
# The short X.Y version.
version = ''

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages.
html_theme = 'default'

# Paths that contain custom static files (such as style sheets),
# relative to this directory; copied after the builtin static files.
html_static_path = ['_static']

# Output file base name for HTML help builder.
htmlhelp_basename = 'GlanceReleaseNotesdoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # All keys ('papersize', 'pointsize', 'preamble', ...) left at defaults.
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    ('index', 'GlanceReleaseNotes.tex',
     u'Glance Release Notes Documentation',
     u'Glance Developers', 'manual'),
]

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'glancereleasenotes',
     u'Glance Release Notes Documentation',
     [u'Glance Developers'], 1)
]

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    ('index', 'GlanceReleaseNotes',
     u'Glance Release Notes Documentation',
     u'Glance Developers', 'GlanceReleaseNotes',
     'One line description of project.',
     'Miscellaneous'),
]
"""List the IP forwarding rules"
"" from baseCmd import * from baseResponse import * class listIpForwardingRulesCmd (baseCmd): typeInfo = {} def __init__(self): self.isAsync = "false" """list resources by account. Must be used with the domainId parameter.""" self.account = None self.typeInfo['account'] = 'string' """list only resources belonging to the domain specified""" self.domainid = None self.typeInfo['domainid'] = 'uuid' """Lists rule with the specified I
D.""" self.id = None self.typeInfo['id'] = 'uuid' """list the rule belonging to this public IP address""" self.ipaddressid = None self.typeInfo['ipaddressid'] = 'uuid' """defaults to false, but if true, lists all resources from the parent specified by the domainId till leaves.""" self.isrecursive = None self.typeInfo['isrecursive'] = 'boolean' """List by keyword""" self.keyword = None self.typeInfo['keyword'] = 'string' """If set to false, list only resources belonging to the command's caller; if set to true - list resources that the caller is authorized to see. Default value is false""" self.listall = None self.typeInfo['listall'] = 'boolean' """""" self.page = None self.typeInfo['page'] = 'integer' """""" self.pagesize = None self.typeInfo['pagesize'] = 'integer' """list objects by project""" self.projectid = None self.typeInfo['projectid'] = 'uuid' """Lists all rules applied to the specified VM.""" self.virtualmachineid = None self.typeInfo['virtualmachineid'] = 'uuid' self.required = [] class listIpForwardingRulesResponse (baseResponse): typeInfo = {} def __init__(self): """the ID of the port forwarding rule""" self.id = None self.typeInfo['id'] = 'string' """the cidr list to forward traffic from""" self.cidrlist = None self.typeInfo['cidrlist'] = 'string' """is firewall for display to the regular user""" self.fordisplay = None self.typeInfo['fordisplay'] = 'boolean' """the public ip address for the port forwarding rule""" self.ipaddress = None self.typeInfo['ipaddress'] = 'string' """the public ip address id for the port forwarding rule""" self.ipaddressid = None self.typeInfo['ipaddressid'] = 'string' """the id of the guest network the port forwarding rule belongs to""" self.networkid = None self.typeInfo['networkid'] = 'string' """the ending port of port forwarding rule's private port range""" self.privateendport = None self.typeInfo['privateendport'] = 'string' """the starting port of port forwarding rule's private port range""" self.privateport = None 
self.typeInfo['privateport'] = 'string' """the protocol of the port forwarding rule""" self.protocol = None self.typeInfo['protocol'] = 'string' """the ending port of port forwarding rule's private port range""" self.publicendport = None self.typeInfo['publicendport'] = 'string' """the starting port of port forwarding rule's public port range""" self.publicport = None self.typeInfo['publicport'] = 'string' """the state of the rule""" self.state = None self.typeInfo['state'] = 'string' """the VM display name for the port forwarding rule""" self.virtualmachinedisplayname = None self.typeInfo['virtualmachinedisplayname'] = 'string' """the VM ID for the port forwarding rule""" self.virtualmachineid = None self.typeInfo['virtualmachineid'] = 'string' """the VM name for the port forwarding rule""" self.virtualmachinename = None self.typeInfo['virtualmachinename'] = 'string' """the vm ip address for the port forwarding rule""" self.vmguestip = None self.typeInfo['vmguestip'] = 'string' """the list of resource tags associated with the rule""" self.tags = [] class tags: def __init__(self): """"the account associated with the tag""" self.account = None """"customer associated with the tag""" self.customer = None """"the domain associated with the tag""" self.domain = None """"the ID of the domain associated with the tag""" self.domainid = None """"tag key name""" self.key = None """"the project name where tag belongs to""" self.project = None """"the project id the tag belongs to""" self.projectid = None """"id of the resource""" self.resourceid = None """"resource type""" self.resourcetype = None """"tag value""" self.value = None
import os
from datetime import date
from unittest.mock import MagicMock, call

import pytest
import imagesize

from kaleidoscope import renderer, generator
from kaleidoscope.model import Gallery, Album, Section, Photo
from kaleidoscope.generator import generate, DefaultListener


def test_generate_gallery_index(tmpdir, disable_resize):
    """An index file is written at the gallery root."""
    generate(Gallery("Testing Gallery", "The Tester", []), str(tmpdir))
    assert tmpdir.join("index.html").check()


def test_gallery_index_context(tmpdir, monkeypatch, disable_resize):
    """The index template receives the gallery and the current year."""
    fake_render = MagicMock()
    monkeypatch.setattr(renderer, 'render', fake_render)

    gallery = Gallery("Testing Gallery", "The Tester", [])
    generate(gallery, str(tmpdir))

    expected_context = {'gallery': gallery, 'current_year': date.today().year}
    fake_render.assert_called_with(
        "gallery.html", str(tmpdir.join("index.html")), expected_context)


def test_album_index_generated(tmpdir, gallery_with_one_photo, disable_resize):
    """An index file is written inside each album directory."""
    generate(gallery_with_one_photo, str(tmpdir))
    assert tmpdir.join("album", "index.html").exists()


def test_album_index_context(tmpdir, monkeypatch, disable_resize):
    """The album template receives the album, gallery and current year."""
    fake_render = MagicMock()
    monkeypatch.setattr(renderer, 'render', fake_render)

    album = Album("album", "The Album", date(2017, 6, 24), [])
    gallery = Gallery("Testin Gallery", "The Tester", [album])
    generate(gallery, str(tmpdir))

    fake_render.assert_called_with(
        "album.html",
        str(tmpdir.join("album", "index.html")),
        {'album': album, 'gallery': gallery,
         'current_year': date.today().year})


def test_resize_thumbnail(tmpdir, gallery_with_one_photo):
    """A thumbnail no larger than 300x200 is produced."""
    generate(gallery_with_one_photo, str(tmpdir))
    thumb = tmpdir.join("album", "thumb", "photo.jpg")
    assert thumb.exists()
    assert imagesize.get(str(thumb)) <= (300, 200)


def test_resize_large(tmpdir, gallery_with_one_photo):
    """A large rendition no bigger than 1500x1000 is produced."""
    generate(gallery_with_one_photo, str(tmpdir))
    large = tmpdir.join("album", "large", "photo.jpg")
    assert large.exists()
    assert imagesize.get(str(large)) <= (1500, 1000)


def test_resize_existing(tmpdir, gallery_with_one_photo):
    """Already-resized images are left untouched (mtime unchanged)."""
    thumb = tmpdir.join("album", "thumb", "photo.jpg")
    large = tmpdir.join("album", "large", "photo.jpg")
    for existing in (thumb, large):
        existing.ensure()
    mtimes_before = (thumb.mtime(), large.mtime())

    generate(gallery_with_one_photo, str(tmpdir))

    assert (thumb.mtime(), large.mtime()) == mtimes_before


def test_resized_images_metadata(tmpdir, gallery_with_one_photo):
    """Resized-image URLs and sizes are recorded on the Photo object."""
    generate(gallery_with_one_photo, str(tmpdir))
    photo = next(gallery_with_one_photo.albums[0].photos)
    assert photo.thumb.url == "thumb/photo.jpg"
    assert photo.thumb.size <= (300, 200)
    assert photo.large.url == "large/photo.jpg"
    assert photo.large.size <= (1500, 1000)


def test_copy_assets(tmpdir, disable_resize):
    """The assets directory is copied into the output tree."""
    generate(Gallery("", "", []), str(tmpdir))
    for asset in ("kaleidoscope.js", "kaleidoscope.css"):
        assert tmpdir.join("assets", asset).exists()


def test_assets_directory_cleaned(tmpdir, disable_resize):
    """A pre-existing assets directory is wiped before copying."""
    stale = tmpdir.join("assets", "existing-file.txt")
    stale.ensure()
    generate(Gallery("", "", []), str(tmpdir))
    assert not stale.exists()


def test_generator_reporting_events(gallery_with_three_photos, tmpdir,
                                    disable_resize):
    """The listener is notified of album start/finish and each resize."""
    listener = MagicMock(spec=DefaultListener)
    generate(gallery_with_three_photos, tmpdir, listener)

    album = gallery_with_three_photos.albums[0]
    assert listener.starting_album.call_args == call(album, 3)
    assert listener.finishing_album.called
    assert listener.resizing_photo.call_count == 3


def test_counting_photos_to_resize(
        gallery_with_three_photos, tmpdir, disable_resize):
    """The reported count covers only photos that actually need resizing."""
    # Pre-create f1.jpg's renditions => only 2 photos remain to resize.
    tmpdir.join("album", "large", "f1.jpg").ensure()
    tmpdir.join("album", "thumb", "f1.jpg").ensure()

    listener = MagicMock(spec=DefaultListener)
    generate(gallery_with_three_photos, tmpdir, listener)

    album = gallery_with_three_photos.albums[0]
    assert listener.starting_album.call_args == call(album, 2)
    assert listener.resizing_photo.call_count == 2


@pytest.fixture
def gallery_with_one_photo():
    """A gallery holding a single album with one real test photo."""
    source = os.path.join(os.path.dirname(__file__), 'data', 'photo.jpg')
    single = Photo("photo.jpg", "", "", source)
    album = Album("album", "The Album", date(2017, 6, 24),
                  [Section("photos", [single])])
    return Gallery("Testin Gallery", "The Tester", [album])


@pytest.fixture
def gallery_with_three_photos():
    """A gallery holding a single album with three copies of the photo."""
    source = os.path.join(os.path.dirname(__file__), 'data', 'photo.jpg')
    trio = [Photo("f%d.jpg" % (i,), "", "", source) for i in range(3)]
    album = Album("album", "The Album", date(2017, 6, 24),
                  [Section("photos", trio)])
    return Gallery("Testing Gallery", "The Tester", [album])


@pytest.fixture
def disable_resize(monkeypatch):
    """Replace image resize with dummy function and provide constant size."""
    monkeypatch.setattr(generator, 'resize', MagicMock())
    monkeypatch.setattr(imagesize, 'get', MagicMock(return_value=(42, 42)))
# NOTE(review): implicit-relative imports — this module appears to target
# Python 2; confirm before porting.
import announcements, users, corporate, api, volunteer, teams, innovation


def configure_routes(app):
    """Register every URL rule of the application on *app*.

    Rules are declared as (rule, endpoint, view_func, methods) tuples and
    registered in order, which preserves the original call sequence.
    """
    routes = [
        ('/', 'landing', users.views.landing, ['GET']),

        # Signing Up/Registration
        ('/register', 'sign-up', users.views.sign_up, ['GET', 'POST']),
        ('/callback', 'callback', users.views.callback, ['GET']),
        ('/complete_mlh_registration', 'complete-mlh-registration',
         users.views.complete_mlh_registration, ['GET', 'POST']),
        ('/complete_registration', 'complete-registration',
         users.views.complete_registration, ['GET', 'POST']),
        ('/login', 'login', users.views.login, ['GET', 'POST']),
        ('/logout', 'logout', users.views.logout, ['GET']),
        ('/login/reset', 'forgot-password',
         users.views.forgot_password, ['GET', 'POST']),
        ('/login/reset/<token>', 'reset-password',
         users.views.reset_password, ['GET', 'POST']),
        ('/register/confirm/<token>', 'confirm-account',
         users.views.confirm_account, ['GET']),

        # User action pages
        ('/edit_profile', 'edit-profile',
         users.views.edit_profile, ['GET', 'POST']),
        ('/dashboard', 'dashboard', users.views.dashboard, ['GET']),
        ('/resend_confirmation_email', 'resend-confirmation-email',
         users.views.resend_confirmation, ['POST']),
        # ('/profile/resume', 'view-own-resume',
        #  users.views.view_own_resume, ['GET']),
        # ('/refresh', 'refresh-mlh-data',
        #  users.views.refresh_from_mlh, ['GET']),
        ('/accept', 'accept-invite', users.views.accept, ['GET', 'POST']),
        ('/accept/sign', 'sign', users.views.sign, ['GET', 'POST']),
        ('/additional_status', 'additional-status',
         users.views.additional_status, ['GET']),
        ('/accept_travel_reimbursement', 'accept-travel-reimbursement',
         users.views.accept_reimbursement, ['POST']),
        ('/view_campus_ambassadors', 'view-campus-ambassadors',
         users.views.view_campus_ambassadors, ['GET']),

        # Team actions
        ('/team', 'team', teams.views.team, ['GET', 'POST']),

        # Admin Pages
        ('/admin', 'admin-dash', users.admin_views.admin_dashboard, ['GET']),
        ('/admin/create-corp-user', 'create-corp',
         users.admin_views.create_corp_user, ['GET', 'POST']),
        ('/admin/debug', 'debug-user',
         users.admin_views.debug_user, ['GET', 'POST']),
        ('/admin/initial-create', 'initial-create',
         users.admin_views.initial_create, ['GET', 'POST']),
        ('/admin/batch', 'batch-modify',
         users.admin_views.batch_modify, ['GET', 'POST']),
        ('/admin/send-email', 'send-email',
         users.admin_views.send_email_to_users, ['GET', 'POST']),
        ('/admin/volunteer-list', 'volunteer-list',
         volunteer.views.volunteer_list, ['GET']),
        ('/admin/add-volunteer', 'add-volunteer',
         volunteer.views.add_volunteer, ['POST']),
        ('/admin/reject', 'reject-users',
         users.admin_views.reject_users, ['GET', 'POST']),
        ('/admin/accept-teams', 'accept-teams',
         users.admin_views.accept_teams, ['GET', 'POST']),
        ('/admin/check-in', 'manual-check-in',
         users.admin_views.check_in_manual, ['GET', 'POST']),
        ('/admin/sign/<user_id>', 'check-in-sign',
         users.admin_views.check_in_sign, ['GET', 'POST']),
        ('/admin/check-in-post', 'manual-check-in-post',
         users.admin_views.check_in_post, ['POST']),
        ('/admin/set-mlh-id', 'set-mlh-id',
         users.admin_views.set_mlh_id, ['GET', 'POST']),
        ('/admin/job/<job_key>', 'worker-jobs',
         users.admin_views.job_view, ['GET']),

        # API
        ('/api/announcements', 'announcements',
         announcements.views.announcement_list, ['GET']),
        ('/api/announcements/create', 'create-announcement',
         announcements.views.create_announcement, ['POST']),
        ('/api/partners', 'partners', api.views.partner_list, ['GET']),
        ('/api/schedule', 'schedule', api.views.schedule, ['GET']),
        ('/api/schedule/<day>', 'day-schedule',
         api.views.schedule_day, ['GET']),
        ('/api/check-in', 'check-in-api',
         api.views.check_in, ['GET', 'POST']),
        ('/api/passbook', 'passbook', api.views.passbook, ['POST']),

        # Corporate Portal
        ('/corp/login', 'corp-login',
         corporate.views.login, ['GET', 'POST']),
        ('/corp/login/reset', 'corp-forgot-password',
         corporate.views.forgot_password, ['GET', 'POST']),
        ('/corp/login/reset/<token>', 'corp-reset-password',
         corporate.views.reset_password, ['GET', 'POST']),
        ('/corp/setup/<token>', 'new-user-setup',
         corporate.views.new_user_setup, ['GET', 'POST']),
        ('/corp', 'corp-dash',
         corporate.views.corporate_dash, ['GET', 'POST']),
        ('/corp/search', 'corp-search',
         corporate.views.corporate_search, ['GET']),
        ('/corp/search/results', 'search-results',
         corporate.views.search_results, ['POST']),
        ('/corp/view/resume', 'resume-view',
         corporate.views.view_resume, ['GET']),
        ('/corp/download/all-resumes', 'all-resume-download',
         corporate.views.download_all_resumes, ['GET']),

        # Innovation
        ('/innovation/auth', 'innovation-auth',
         innovation.views.auth, ['GET']),
        ('/innovation/get-user-info', 'innovation-user-info',
         innovation.views.get_user_info, ['GET']),
    ]

    for rule, endpoint, view, methods in routes:
        app.add_url_rule(rule, endpoint, view_func=view, methods=methods)
as live_poll_main  # NOTE(review): truncated import — the line begins before this fragment

# Module metadata read by the Shinken plugin manager: which daemons may load
# this module and its registered type.
properties = {
    'daemons': ['arbiter', 'receiver'],
    'type': 'ws_nocout',
    'external': True,
}


# called by the plugin manager to get a broker
def get_instance(plugin):
    # info("[WS_Nocout] get_instance ...")
    instance = WsNocout(plugin)
    return instance

# Main app var. Will be fill with our running module instance
app = None

# Check_MK home dir
# NOTE(review): hard-coded OMD slave paths — presumably site-specific; confirm.
CHECK_MK_CONF_PATH = '/omd/dev_slave/slave_2/etc/check_mk/conf.d/wato/'
CHECK_MK_BIN = '/omd/dev_slave/slave_2/bin/cmk'
OLD_CONFIG = 'old_config.tar.gz'
NEW_CONFIG = 'new_config.tar.gz'


def get_commands(time_stamps, hosts, services, return_codes, outputs):
    """Compose a list of external-command strings from the parallel lists
    received in a POST request.

    Each position across the five lists describes one check result; a missing
    or empty service name yields a PROCESS_HOST_CHECK_RESULT command,
    otherwise PROCESS_SERVICE_CHECK_RESULT.
    """
    commands = []

    # One timestamp for every entry that did not supply its own.
    current_time_stamp = int(time.time())

    def _compose_command(t, h, s, r, o):
        """Simple function to create a command from the inputs"""
        cmd = ""
        if not s or s == "":
            # Host check result (no service given).
            cmd = '[%s] PROCESS_HOST_CHECK_RESULT;%s;%s;%s' % (
                t if t is not None else current_time_stamp, h, r, o)
        else:
            # Service check result.
            cmd = '[%s] PROCESS_SERVICE_CHECK_RESULT;%s;%s;%s;%s' % (
                t if t is not None else current_time_stamp, h, s, r, o)
        logger.debug("[WS_Nocout] CMD: %s" % (cmd))
        commands.append(cmd)

    # Trivial case: empty commmand list
    if (return_codes is None or len(return_codes) == 0):
        return commands

    # Sanity check: if we get N return codes, we must have N hosts.
    # The other values could be None
    if (len(return_codes) != len(hosts)):
        logger.error("[WS_Nocout] number of return codes (%d) does not match number of hosts (%d)" % (len(return_codes), len(hosts)))
        abort(400, "number of return codes does not match number of hosts")

    # Python 2 map(): runs eagerly and pads shorter lists with None, which is
    # what lets _compose_command fall back to current_time_stamp.
    map(_compose_command, time_stamps, hosts, services, return_codes, outputs)
    logger.debug("[WS_Nocout] received command: %s" % (str(commands)))
    return commands


def get_page():
    """HTTP handler: turn the POSTed check results into external commands
    and push them onto the daemon's queue."""
    commands_list = []
    try:
        # Getting lists of informations for the commands
        time_stamp_list = []
        host_name_list = []
        service_description_list = []
        return_code_list = []
        output_list = []
        time_stamp_list = request.forms.getall(key='time_stamp')
        logger.debug("[WS_Nocout] time_stamp_list: %s" % (time_stamp_list))
        host_name_list = request.forms.getall(key='host_name')
        logger.debug("[WS_Nocout] host_name_list: %s" % (host_name_list))
        service_description_list = request.forms.getall(key='service_description')
        logger.debug("[WS_Nocout] service_description_list: %s" % (service_description_list))
        return_code_list = request.forms.getall(key='return_code')
        logger.debug("[WS_Nocout] return_code_list: %s" % (return_code_list))
        output_list = request.forms.getall(key='output')
        logger.debug("[WS_Nocout] output_list: %s" % (output_list))
        commands_list = get_commands(time_stamp_list, host_name_list,
                                     service_description_list,
                                     return_code_list, output_list)
    except Exception, e:
        # Best-effort: a malformed request produces no commands at all.
        logger.error("[WS_Nocout] failed to get the lists: %s" % str(e))
        commands_list = []

    #check_auth()

    # Adding commands to the main queue()
    logger.debug("[WS_Nocout] commands: %s" % str(sorted(commands_list)))
    for c in sorted(commands_list):
        ext = ExternalCommand(c)
        app.from_q.put(ext)

    # OK here it's ok, it will return a 200 code


def do_restart():
    """HTTP handler: queue a RESTART_PROGRAM external command."""
    # Getting lists of informations for the commands
    time_stamp = request.forms.get('time_stamp', int(time.time()))
    command = '[%s] RESTART_PROGRAM\n' % time_stamp

    #check_auth()

    # Adding commands to the main queue()
    logger.warning("[WS_Nocout] command: %s" % str(command))
    ext = ExternalCommand(command)
    app.from_q.put(ext)

    # OK here it's ok, it will return a 200 code


def do_reload():
    """HTTP handler: queue a RELOAD_CONFIG external command."""
    # Getting lists of informations for the commands
    time_stamp = request.forms.get('time_stamp', int(time.time()))
    command = '[%s] RELOAD_CONFIG\n' % time_stamp

    #check_auth()

    # Adding commands to the main queue()
    logger.warning("[WS_Nocout] command: %s" % str(command))
    ext = ExternalCommand(command)
    app.from_q.put(ext)

    # OK here it's ok, it will return a 200 code


def do_recheck():
    """HTTP handler: queue a forced host or service recheck.

    Requires host_name; with service_description schedules a forced service
    check, otherwise a forced host check.
    """
    # Getting lists of informations for the commands
    time_stamp = request.forms.get('time_stamp', int(time.time()))
    host_name = request.forms.get('host_name', '')
    service_description = request.forms.get('service_description', '')

    logger.debug("[WS_Nocout] Timestamp '%s' - host: '%s', service: '%s'" % (
        time_stamp,
        host_name,
        service_description
    )
    )
    if not host_name:
        abort(400, 'Missing parameter host_name')

    if service_description:
        # SCHEDULE_FORCED_SVC_CHECK;<host_name>;<service_description>;<check_time>
        command = '[%s] SCHEDULE_FORCED_SVC_CHECK;%s;%s;%s\n' % (time_stamp,
                                                                 host_name,
                                                                 service_description,
                                                                 time_stamp)
    else:
        # SCHEDULE_FORCED_HOST_CHECK;<host_name>;<check_time>
        command = '[%s] SCHEDULE_FORCED_HOST_CHECK;%s;%s\n' % (time_stamp,
                                                               host_name,
                                                               time_stamp)

    # We check for auth if it's not anonymously allowed
    #check_auth()

    # Adding commands to the main queue()
    logger.debug("[WS_Nocout] command = %s" % command)
    ext = ExternalCommand(command)
    app.from_q.put(ext)

    # OK here it's ok, it will return a 200 code


def do_downtime():
    """HTTP handler: add or delete a host/service downtime.

    NOTE(review): this function is truncated at the end of the fragment —
    the remainder lives outside this view.
    """
    # Getting lists of informations for the commands
    action = request.forms.get('action', 'add')
    time_stamp = request.forms.get('time_stamp', int(time.time()))
    host_name = request.forms.get('host_name', '')
    service_description = request.forms.get('service_description', '')
    start_time = request.forms.get('start_time', int(time.time()))
    end_time = request.forms.get('end_time', int(time.time()))
    # Fixed is 1 for a period between start and end time
    fixed = request.forms.get('fixed', '1')
    # Fixed is 0 (flexible) for a period of duration seconds from start time
    duration = request.forms.get('duration', int('86400'))
    trigger_id = request.forms.get('trigger_id', '0')
    author = request.forms.get('author', 'anonymous')
    comment = request.forms.get('comment', 'No comment')

    logger.debug("[WS_Nocout] Downtime %s - host: '%s', service: '%s', comment: '%s'" % (action, host_name, service_description, comment))

    if not host_name:
        abort(400, 'Missing parameter host_name')

    if action == 'add':
        if service_description:
            # SCHEDULE_SVC_DOWNTIME;<host_name>;<service_description>;<start_time>;<end_time>;<fixed>;<trigger_id>;<duration>;<author>;<comment>
            command = '[%s] SCHEDULE_SVC_DOWNTIME;%s;%s;%s;%s;%s;%s;%s;%s;%s\n' % (
                time_stamp, host_name, service_description, start_time,
                end_time, fixed, trigger_id, duration, author, comment
            )
        else:
            # SCHEDULE_HOST_DOWNTIME;<host_name>;<start_time>;<end_time>;<fixed>;<trigger_id>;<duration>;<author>;<comment>
            command = '[%s] SCHEDULE_HOST_DOWNTIME;%s;%s;%s;%s;%s;%s;%s;%s\n' % (
                time_stamp, host_name, start_time, end_time, fixed,
                trigger_id, duration, author, comment
            )

    if action == 'delete':
        if service_description:
            # DEL_ALL_SVC_DOWNTIMES;<host_name>;<service_description>
            command = '[%s] DEL_ALL_SVC_DOWNTIMES;%s;%s\n' % (
                time_stamp, host_name, service_description)
        else:
            # DEL_ALL_SVC_DOWNTIMES;<host_name>
            command = '[%s] DEL_ALL_HOST_DOWNTIMES;%s\n' % (
                time_stamp, host_name)

    # We check for auth if it's not anonymously allowed
    if app.username != 'anonymous':
        basic = parse_auth(request.environ.get('HTTP_AUTHORIZATION', ''))
        # Maybe the user not even ask for user/pass. If so, bail out
        if not basic:
            abort(401, 'Authentication required')

        # Maybe he do not give the good credential?
        if basic[0] != app.username or basic[1] != app.password:
            abort(403, 'Authentication denied')

    # Adding commands to the main queue()
    logger.debug("[WS_Nocout] command =
from __future__ import absolute_import
from __future__ import division
# Copyright (c) 2010-2016 openpyxl

"""Manage Excel date weirdness."""

# Python stdlib imports
import datetime
from datetime import timedelta, tzinfo
import re

from jdcal import (
    gcal2jd,
    jd2gcal,
    MJD_0
)

from openpyxl.compat import lru_cache


# constants
# Excel serial 0 corresponds to these epochs, depending on the workbook's
# calendar setting (Windows 1900 system vs. the Mac 1904 system).
MAC_EPOCH = datetime.date(1904, 1, 1)
WINDOWS_EPOCH = datetime.date(1899, 12, 30)
CALENDAR_WINDOWS_1900 = sum(gcal2jd(WINDOWS_EPOCH.year, WINDOWS_EPOCH.month, WINDOWS_EPOCH.day))
CALENDAR_MAC_1904 = sum(gcal2jd(MAC_EPOCH.year, MAC_EPOCH.month, MAC_EPOCH.day))
SECS_PER_DAY = 86400

# NOTE(review): EPOCH is not referenced anywhere in this fragment — confirm
# whether other modules import it before removing.
EPOCH = datetime.datetime.utcfromtimestamp(0)
W3CDTF_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
# NOTE(review): the '.' before the fractional-seconds group is unescaped, so
# it matches any character; the pattern is also not a raw string. Behaviour is
# unchanged for well-formed input because only groups 1-6 are consumed below.
W3CDTF_REGEX = re.compile('(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(.(\d{2}))?Z?')


def datetime_to_W3CDTF(dt):
    """Convert from a datetime to a timestamp string."""
    return datetime.datetime.strftime(dt, W3CDTF_FORMAT)


def W3CDTF_to_datetime(formatted_string):
    """Convert from a timestamp string to a datetime object.

    Only the date and whole-second time components are used; any fractional
    seconds matched by the regex are discarded.
    """
    match = W3CDTF_REGEX.match(formatted_string)
    dt = [int(v) for v in match.groups()[:6]]
    return datetime.datetime(*dt)


@lru_cache()
def to_excel(dt, offset=CALENDAR_WINDOWS_1900):
    """Convert a date or datetime to an Excel serial number.

    ``offset`` selects the workbook calendar (Windows 1900 or Mac 1904).
    """
    jul = sum(gcal2jd(dt.year, dt.month, dt.day)) - offset
    # Excel's 1900 calendar wrongly treats 1900 as a leap year (serial 60 is
    # the fictitious 1900-02-29), so serials at or below 60 are shifted by one.
    if jul <= 60 and offset == CALENDAR_WINDOWS_1900:
        jul -= 1
    # datetime instances carry a time() method; fold the time of day into the
    # fractional part of the serial. Plain dates stay whole numbers.
    if hasattr(dt, 'time'):
        jul += time_to_days(dt)
    return jul


@lru_cache()
def from_excel(value, offset=CALENDAR_WINDOWS_1900):
    """Convert an Excel serial number back to a date/time object.

    Returns None for None input, a ``datetime.time`` for pure fractions
    (serials strictly between 0 and 1), and a ``datetime.datetime`` otherwise.
    """
    if value is None:
        return
    # Undo the fictitious 1900-02-29 shift applied by to_excel above.
    if 1 < value < 60 and offset == CALENDAR_WINDOWS_1900:
        value += 1
    parts = list(jd2gcal(MJD_0, value + offset - MJD_0))
    _, fraction = divmod(value, 1)
    # jd2gcal can land exactly on midnight of the next day when a fraction is
    # present ("jumped"); in that case the fractional time is dropped.
    jumped = (parts[-1] == 0 and fraction > 0)
    diff = datetime.timedelta(days=fraction)
    if 0 < abs(value) < 1:
        return days_to_time(diff)
    if not jumped:
        return datetime.datetime(*parts[:3]) + diff
    else:
        return datetime.datetime(*parts[:3] + [0])


class GMT(tzinfo):
    """Fixed zero-offset timezone, used as a UTC fallback on Python 2.6."""

    def utcoffset(self, dt):
        return timedelta(0)

    def dst(self, dt):
        return timedelta(0)

    def tzname(self, dt):
        return "GMT"

try:
    from datetime import timezone
    UTC = timezone(timedelta(0))
except ImportError:
    # Python 2.6
    UTC = GMT()


@lru_cache()
def time_to_days(value):
    """Convert a time value to fractions of day"""
    # Normalise aware values to UTC so the fraction is wall-clock independent.
    if value.tzinfo is not None:
        value = value.astimezone(UTC)
    return (
        (value.hour * 3600)
        + (value.minute * 60)
        + value.second
        + value.microsecond / 10**6
        ) / SECS_PER_DAY


@lru_cache()
def timedelta_to_days(value):
    """Convert a timedelta value to fractions of a day"""
    # total_seconds() is missing on Python 2.6; emulate it there.
    if not hasattr(value, 'total_seconds'):
        secs = (value.microseconds +
                (value.seconds + value.days * SECS_PER_DAY) * 10**6) / 10**6
    else:
        secs = value.total_seconds()
    return secs / SECS_PER_DAY
@lru_cache()
def days_to_time(value):
    """Convert a timedelta holding a fraction of a day into a datetime.time.

    Only the ``seconds`` and ``microseconds`` components of *value* are
    consulted; any whole days carried by the timedelta are ignored.
    """
    total_minutes, sec = divmod(value.seconds, 60)
    hour, minute = divmod(total_minutes, 60)
    return datetime.time(hour, minute, sec, value.microseconds)
# NOTE(review): the fragment begins mid-line — the leading "from " of this
# import was lost in extraction; names such as Template, cherrypy, os,
# gen_grammar and subprocess are imported in the missing header.
tabsClass import TabClass
import simplejson
from subprocess import Popen, PIPE, STDOUT
import roslib
import signal
roslib.load_manifest('qbo_webi');
import rospy
import time
from uuid import getnode as get_mac
from poster.encode import multipart_encode
from poster.streaminghttp import register_openers
import urllib2


class VoiceRecognitionManager(TabClass):
    """Web tab (cherrypy-exposed) that manages Julius language models and
    records/uploads voice samples for the Q.bo robot."""

    def __init__(self, language):
        self.ipWavServer = "audio.openqbo.org"
        self.portWavServer = "8588"
        self.language = language
        self.juliusPath = roslib.packages.get_pkg_dir("qbo_listen")
        self.juliusAMPath = "/usr/share/qbo-julius-model/"
        self.htmlTemplate = Template(filename='voiceRecognition/templates/voiceRecognitionTemplate.html')
        self.jsTemplate = Template(filename='voiceRecognition/templates/voiceRecognitionTemplate.js')
        self.tmpdir = "/tmp/"
        self.LMPaths = "/config/LM/"
        self.LMFileName = "/sentences.conf"
        self.PhonemsFileName = "/phonems"
        self.TiedlistFileName = "/tiedlist"
        self.languages_names = {'en':'English','es':'Spanish','pt':'Português','de':'Deutsch','fr':'Français','it':'Italiano'}
        self.path = roslib.packages.get_pkg_dir("qbo_webi")+"/src/voiceRecognition/"
        self.lan = self.language["current_language"]
        # MAC address is used to build a per-machine recording filename.
        self.mac = get_mac()
        # Handle of the currently running arecord/rm subprocess (see rec/stop).
        self.p = None

    @cherrypy.expose
    def voiceRecognitionJs(self, parameters=None):
        """Render the tab's JavaScript template."""
        self.lan = self.language["current_language"]
        return self.jsTemplate.render(language=self.language)

    def getLanguages(self):
        """List installed language dirs, or -1 when qbo_listen is missing.

        NOTE(review): the bare except and the -1 sentinel (instead of raising
        or returning an empty list) are kept as-is; callers test for -1.
        """
        try:
            dirList = os.listdir(self.juliusPath+self.LMPaths)
            dirList.sort()
        except:
            dirList = -1
        return dirList

    def isQboListenInstalled(self):
        if self.getLanguages() == -1:
            return False
        else:
            return True

    def getLanguageModels(self, language):
        """List model dirs for a language, or -1 on error (see getLanguages)."""
        try:
            dirList = os.listdir(self.juliusPath+self.LMPaths+language)
            dirList.sort()
        except:
            dirList = -1
        return dirList

    def getLMSentences(self, language, model):
        """Return the raw sentences.conf contents, or "" on any error.

        NOTE(review): the file handle is never closed on the success path.
        """
        try:
            f = open(self.juliusPath+self.LMPaths+language+"/"+model+self.LMFileName, 'r')
            return f.read()
        except:
            sentences = ""
            return sentences

    @cherrypy.expose
    def getModels(self, lang):
        """Return the model names joined by '::', or -1 on error."""
        modelList = ""
        try:
            dirList = os.listdir(self.juliusPath+self.LMPaths+lang)
            dirList.sort()
            for model in dirList:
                modelList = modelList+model+"::"
            modelList = modelList[:-2]
        except:
            modelList = -1
        return modelList

    @cherrypy.expose
    def test1(self, lang, text):
        """Check the submitted sentences for vocabulary errors; returns the
        offending words joined by '::' (empty string when none)."""
        text = text.encode("utf-8")
        f = open(self.tmpdir+'LModel', 'w')
        f.write(text)
        f.close()
        words = gen_grammar.verrors(self.tmpdir+'LModel', self.juliusAMPath+lang+"/"+self.PhonemsFileName)
        if words == 0:
            return ""
        else:
            wordsList = ""
            for word in words:
                wordsList = wordsList+word+"::"
            wordsList = wordsList[:-2]
            return wordsList

    @cherrypy.expose
    def test2(self, lang, text):
        """Check each plain word (skipping [tags] and <tags>) against the
        tiedlist; returns failing words joined by '::', upper-cased."""
        errorlist = ""
        text = text.encode("utf-8")
        print text
        wordlist = text.split()
        print wordlist
        for word in wordlist:
            if word[0] != "[" and word[0] != "<":
                print word
                f = open(self.tmpdir+'word', 'w')
                f.write("[sentence]\n")
                f.write(word)
                f.close()
                gen_grammar.createvoca(self.tmpdir+'word', self.juliusAMPath+lang+"/"+self.PhonemsFileName, self.tmpdir+'word')
                print self.tmpdir+'word'
                print self.juliusAMPath+lang+"/"+self.TiedlistFileName
                if gen_grammar.perrors(self.tmpdir+'word.voca', self.juliusAMPath+lang+"/"+self.TiedlistFileName) != 0:
                    errorlist = errorlist+word+"::"
        errorlist = errorlist[:-2]
        return errorlist.upper()

    @cherrypy.expose
    def saveToFile(self, lang, text, model):
        """Persist the sentences file, recompile the grammar and relaunch the
        recognizer; returns "" on success or an error string."""
        try:
            #print self.juliusPath+self.LMPaths+language+"/"+model+self.LMFileName
            text = text.encode("utf-8")
            f = open(self.juliusPath+self.LMPaths+lang+"/"+model+self.LMFileName, 'w')
            f.write(text)
            f.close()
            gen_grammar.compilegrammar(model, lang)
            subprocess.Popen("roslaunch qbo_listen voice_recognizer.launch".split())
        except:
            return "ERROR: Cant write the file"
        return ""

    @cherrypy.expose
    def getFile(self, lang="", model=""):
        """Return the sentences file for lang/model, or an error string."""
        if lang == "" or model == "":
            return "ERROR: lang:"+lang+"; model:"+model
        else:
            #print self.getLMSentences(lang,model)
            return self.getLMSentences(lang, model)

    @cherrypy.expose
    def index(self):
        """Render the main tab page (or a notice when qbo_listen is absent).

        NOTE(review): ``tmp``/``text`` are computed but unused; the nested
        loops break immediately, so only the first lang/model is touched.
        """
        tmp = ""
        if self.isQboListenInstalled():
            for lang in self.getLanguages():
                for LM in self.getLanguageModels(lang):
                    text = self.getLMSentences(lang, LM)
                    break
                break
            return self.htmlTemplate.render(language=self.language, lannames=self.languages_names, alllanguage=self.getLanguages())
        else:
            return "Qbo listen not installed"
#        return self.htmlTemplate.render(language=self.language)

    @cherrypy.expose
    def rec(self):
        """Start recording from the microphone with arecord into tmp/."""
#        n = self.getLenght("Arturo","sp")
#        print "***** "+n
        # Delete the previous recording, if there was one.
        try:
            cmd = "rm "+self.path+"tmp/*"
            self.p = Popen(cmd.split())
        except ValueError:
            print "Nada que borrar"
        '''
        try:
            cmd="rm "+self.path+"/*_en"
            self.p = Popen(cmd.split())
        except ValueError:
            print "Nada que borrar"

        try:
            cmd="rm "+path+"/*sp"
            print cmd
            self.p = Popen(cmd.split())
        except ValueError:
            print "Nada que borrar"
        '''
        self.filename = str(self.mac)+"_"+self.lan
        #filename = filename.replace("\"","")
#        filename = "tmp.wav"
        print "FILENAME == "+self.filename
        print "grabnando!!!! "+self.path+"tmp/"+self.filename
        cmd = "arecord -f S16_LE -r 44100 -c 1 "+self.path+"tmp/"+self.filename
        self.p = Popen(cmd.split())
        name = "oleole"
        return name

    @cherrypy.expose
    def stop(self):
        """Stop the running recording (SIGINT) and upload the wav file."""
        if(self.p == None):
            print "P ES NULL!!??"
        else:
            print "matar grabacin"
            self.p.send_signal(signal.SIGINT)
        cmd = "python "+self.path+"sendWav2Server.py "+self.path+"tmp/"+self.filename+" "+self.ipWavServer+" "+self.portWavServer
        print cmd
        out = runCmd(cmd)
        print out[0]
        if out[1] != "":
            print "Error"
            return "error"
        return unicode(out[0], 'utf8')

    @cherrypy.expose
    def play(self):
        """Play back the last recording with aplay."""
        print "play sound"
        os.system('aplay '+self.path+"tmp/"+self.filename)
        return "ok"

    @cherrypy.expose
    def save(self, transcripcion):
        """Upload the transcription for the last recording to the server."""
        print "SAVE! transcripcion="+transcripcion
        cmd = "python "+self.path+"sendTranscription2Server.py "+str(self.mac)+" \""+transcripcion+"\" "+self.lan+" "+self.ipWavServer+" "+self.portWavServer
        print cmd
        out = runCmd(cmd)
        if out[1] != "":
            print "Error "+out[1]
            return "error"
        return out[0]
#        return "ok"


def runCmd(cmd, timeout=None):
    '''
    Will execute a command, read the output and return it back.

    @param cmd: command to execute
    @param timeout: process timeout in seconds
    @return: a tuple of three: first stdout, then stderr, then exit code
    @raise OSError: on missing command or if a timeout was reached
    '''
    ph_out = None # process output
    ph_err = None # stderr
    ph_ret = None # return code

    # NOTE(review): source fragment is truncated here mid-statement.
    p = subprocess.Popen(cmd,
        Debian, Ubuntu, Fedora, RedHat, openSUSE, Linaro,
        ScientificLinux, Arch, CentOS, AMI.
    - Any distribution that uses systemd as their init system.
    - Note, this module does *NOT* modify /etc/hosts. You need to modify it
      yourself using other modules like template or replace.
options:
    name:
        required: true
        description:
            - Name of the host
'''

EXAMPLES = '''
- hostname: name=web01
'''

import socket
from distutils.version import LooseVersion

# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.facts import *
from ansible.module_utils._text import to_bytes, to_native


class UnimplementedStrategy(object):
    """Placeholder strategy that fails with a helpful message on any use —
    selected when no platform-specific strategy matches."""

    def __init__(self, module):
        self.module = module

    def get_current_hostname(self):
        self.unimplemented_error()

    def set_current_hostname(self, name):
        self.unimplemented_error()

    def get_permanent_hostname(self):
        self.unimplemented_error()

    def set_permanent_hostname(self, name):
        self.unimplemented_error()

    def unimplemented_error(self):
        # Include the distribution in the message when known, to help the
        # user report/diagnose the unsupported platform.
        platform = get_platform()
        distribution = get_distribution()
        if distribution is not None:
            msg_platform = '%s (%s)' % (platform, distribution)
        else:
            msg_platform = platform
        self.module.fail_json(
            msg='hostname module cannot be used on platform %s' % msg_platform)


class Hostname(object):
    """
    This is a generic Hostname manipulation class that is subclassed
    based on platform.

    A subclass may wish to set different strategy instance to self.strategy.

    All subclasses MUST define platform and distribution (which may be None).
    """

    platform = 'Generic'
    distribution = None
    strategy_class = UnimplementedStrategy

    def __new__(cls, *args, **kwargs):
        # Dispatch to the platform/distribution-specific subclass.
        return load_platform_subclass(Hostname, args, kwargs)

    def __init__(self, module):
        self.module = module
        self.name = module.params['name']
        # systemd-managed Linux always uses the systemd strategy, regardless
        # of which distribution subclass was selected.
        if self.platform == 'Linux' and Facts(module).is_systemd_managed():
            self.strategy = SystemdStrategy(module)
        else:
            self.strategy = self.strategy_class(module)

    def get_current_hostname(self):
        return self.strategy.get_current_hostname()

    def set_current_hostname(self, name):
        self.strategy.set_current_hostname(name)

    def get_permanent_hostname(self):
        return self.strategy.get_permanent_hostname()

    def set_permanent_hostname(self, name):
        self.strategy.set_permanent_hostname(name)


class GenericStrategy(object):
    """
    This is a generic Hostname manipulation strategy class.

    A subclass may wish to override some or all of these methods.
      - get_current_hostname()
      - get_permanent_hostname()
      - set_current_hostname(name)
      - set_permanent_hostname(name)
    """

    def __init__(self, module):
        self.module = module
        self.hostname_cmd = self.module.get_bin_path('hostname', True)

    def get_current_hostname(self):
        """Return the live hostname as reported by the hostname command."""
        cmd = [self.hostname_cmd]
        rc, out, err = self.module.run_command(cmd)
        if rc != 0:
            self.module.fail_json(msg="Command failed rc=%d, out=%s, err=%s" %
                                      (rc, out, err))
        return to_native(out).strip()

    def set_current_hostname(self, name):
        """Set the live hostname via the hostname command (not persistent)."""
        cmd = [self.hostname_cmd, name]
        rc, out, err = self.module.run_command(cmd)
        if rc != 0:
            self.module.fail_json(msg="Command failed rc=%d, out=%s, err=%s" %
                                      (rc, out, err))

    def get_permanent_hostname(self):
        # Generic platforms have no persistent store; subclasses override.
        return None

    def set_permanent_hostname(self, name):
        pass

# ===========================================

class DebianStrategy(GenericStrategy):
    """
    This is a Debian family Hostname manipulation strategy class - it edits
    the /etc/hostname file.
    """

    HOSTNAME_FILE = '/etc/hostname'

    def get_permanent_hostname(self):
        # Create an empty file first so a missing /etc/hostname reads as "".
        if not os.path.isfile(self.HOSTNAME_FILE):
            try:
                open(self.HOSTNAME_FILE, "a").write("")
            except IOError:
                err = get_exception()
                self.module.fail_json(msg="failed to write file: %s" %
                                          str(err))
        try:
            f = open(self.HOSTNAME_FILE)
            try:
                return f.read().strip()
            finally:
                f.close()
        except Exception:
            err = get_exception()
            self.module.fail_json(msg="failed to read hostname: %s" %
                                      str(err))

    def set_permanent_hostname(self, name):
        try:
            f = open(self.HOSTNAME_FILE, 'w+')
            try:
                f.write("%s\n" % name)
            finally:
                f.close()
        except Exception:
            err = get_exception()
            self.module.fail_json(msg="failed to update hostname: %s" %
                                      str(err))

# ===========================================

class SLESStrategy(GenericStrategy):
    """
    This is a SLES Hostname strategy class - it edits the /etc/HOSTNAME file.
    """

    HOSTNAME_FILE = '/etc/HOSTNAME'

    def get_permanent_hostname(self):
        # Same create-then-read pattern as DebianStrategy, different file.
        if not os.path.isfile(self.HOSTNAME_FILE):
            try:
                open(self.HOSTNAME_FILE, "a").write("")
            except IOError:
                err = get_exception()
                self.module.fail_json(msg="failed to write file: %s" %
                                          str(err))
        try:
            f = open(self.HOSTNAME_FILE)
            try:
                return f.read().strip()
            finally:
                f.close()
        except Exception:
            err = get_exception()
            self.module.fail_json(msg="failed to read hostname: %s" %
                                      str(err))

    def set_permanent_hostname(self, name):
        try:
            f = open(self.HOSTNAME_FILE, 'w+')
            try:
                f.write("%s\n" % name)
            finally:
                f.close()
        except Exception:
            err = get_exception()
            self.module.fail_json(msg="failed to update hostname: %s" %
                                      str(err))

# ===========================================

class RedHatStrategy(GenericStrategy):
    """
    This is a Redhat Hostname strategy class - it edits the
    /etc/sysconfig/network file.
    """

    NETWORK_FILE = '/etc/sysconfig/network'

    def get_permanent_hostname(self):
        # NOTE(review): the file is opened in binary mode but compared against
        # str literals — fine on Python 2, would mismatch on Python 3.
        try:
            f = open(self.NETWORK_FILE, 'rb')
            try:
                for line in f.readlines():
                    if line.startswith('HOSTNAME'):
                        k, v = line.split('=')
                        return v.strip()
            finally:
                f.close()
        except Exception:
            err = get_exception()
            self.module.fail_json(msg="failed to read hostname: %s" %
                                      str(err))

    def set_permanent_hostname(self, name):
        # Rewrite the file, replacing the HOSTNAME= line in place (or
        # appending one when absent).
        try:
            lines = []
            found = False
            f = open(self.NETWORK_FILE, 'rb')
            try:
                for line in f.readlines():
                    if line.startswith('HOSTNAME'):
                        lines.append("HOSTNAME=%s\n" % name)
                        found = True
                    else:
                        lines.append(line)
            finally:
                f.close()
            if not found:
                lines.append("HOSTNAME=%s\n" % name)
            f = open(self.NETWORK_FILE, 'w+')
            try:
                f.writelines(lines)
            finally:
                f.close()
        except Exception:
            err = get_exception()
            self.module.fail_json(msg="failed to update hostname: %s" %
                                      str(err))

# ===========================================

class SystemdStrategy(GenericStrategy):
    """
    This is a Systemd hostname manipulation strategy class - it uses
    the hostnamectl command.
    """

    def get_current_hostname(self):
        cmd = ['hostname']
        rc, out, err = self.module.run_command(cmd)
        if rc != 0:
            # NOTE(review): source fragment is truncated here mid-statement.
            self.module.fail_json(msg="
#coding: utf-8

"""Packaging script for django-dbmessages."""

import io
import os

from setuptools import setup

# Read the long description with an explicit encoding and close the handle
# deterministically. The original did ``open(...).read()``, which leaks the
# file object and decodes with the locale's default encoding — that breaks
# on systems whose locale is not UTF-8.
with io.open(os.path.join(os.path.dirname(__file__), 'README.md'),
             encoding='utf-8') as readme:
    README = readme.read()

# Allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))

setup(
    name='django-dbmessages',
    version='0.2.0a',
    packages=['dbmessages'],
    include_package_data=True,
    license='BSD License',
    description='Request-independent messaging for Django on top of contrib.messages',
    long_description=README,
    author='Upwork, Anton Strogonoff',
    author_email='python@upwork.com',
    maintainer='Anton Strogonoff',
    maintainer_email='anton@strogonoff.name',
    download_url='http://github.com/strogonoff/django-dbmessages',
    classifiers=[
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
    ],
)
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
@author: Will
"""
from django import forms
from app01 import models


class ImportFrom(forms.Form):
    """Host-import form: a host-type selector plus a hostname text field."""

    HOST_TYPE = ((1, "001"), (2, "002"))    # replace with data loaded from a file
    host_type = forms.IntegerField(
        widget=forms.Select(choices=HOST_TYPE)
    )
    hostname = forms.CharField()

    def __init__(self, *args, **kwargs):
        super(ImportFrom, self).__init__(*args, **kwargs)
        # NOTE(review): this local HOST_TYPE is never used — presumably a
        # leftover from before the DB-driven choices below.
        HOST_TYPE = ((1, "001"), (2, "002"))    # replace with data loaded from a file
        # Re-read choices from the DB on every instantiation so newly added
        # rows appear without reloading the module.
        self.fields['host_type'].widget.choices = models.userInfo.objects.all().values_list("id", "name")
        # NOTE(review): the two calls below look like debugging leftovers —
        # ``get()`` with no arguments raises, and ``filter()``'s result is
        # discarded. Confirm and remove.
        models.userInfo.objects.get()
        models.userInfo.objects.filter()
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date    : 2016-09-01 22:26:01
# @Author  : Your Name (you@example.org)
# @Link    : http://example.org
# @Version : $Id$

# Python 2 script: downloads every image in the #brand-waterfall gallery,
# one download per thread.

import os
import threading
import requests
import lxml
from threading import Thread
from bs4 import BeautifulSoup
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

pic_path = 'pic/'  # directory where downloaded images are saved
URL = 'http://www.nanrenwo.net/z/tupian/hashiqitupian/'
URL1 = 'http://www.nanrenwo.net/'


class Worker(threading.Thread):
    """Downloads a single image (url + img path) to *filename*."""

    def __init__(self, url, img, filename):
        super(Worker, self).__init__()
        self.url = url
        self.img = img
        self.filename = filename

    def run(self):
        # Stream the response to disk in 4 KiB chunks.
        try:
            u = self.url + self.img
            r = requests.get(u, stream=True)
            with open(self.filename, 'wb') as fd:
                for chunk in r.iter_content(4096):
                    fd.write(chunk)
        # NOTE(review): catching an exception only to bare-``raise`` is a
        # no-op; either handle/log it or drop the try block.
        except Exception, e:
            raise


def get_imgs(url):
    """Scrape the gallery page and spawn one Worker per image."""
    t = 1  # sequential file number for saved images
    r = requests.get(url, stream=True)
    soup = BeautifulSoup(r.text, 'lxml')
    # Collect the src of every <img> under the element with id 'brand-waterfall'.
    myimg = [img.get('src') for img in soup.find(id='brand-waterfall').find_all('img')]
    print 'myimg:', myimg
    for img in myimg:
        pic_name = pic_path + str(t) + '.jpg'
        # img_src = img.get('src')
        print 'img: ', img
        # self.download_pic(URL1,img,pic_name)  #request Url,img src,picture name
        w = Worker(URL1, img, pic_name)
        w.start()
        t += 1

# NOTE(review): runs at import time; consider an ``if __name__ == '__main__'`` guard.
get_imgs(URL)
""" .. _tut_stats_cluster_source_2samp: ========================================================================= 2 samples permutation test on source data with spatio-temporal clustering ========================================================================= Tests if the source space data are significantly different between 2 groups of subjects (simulated here using one subject's data). The multiple comparisons problem is addressed with a cluster-level permutation test across space and time. """ # Authors: Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr> # Eric Larson <larson.eric.d@gmail.com> # License: BSD (3-clause) import os.path as op import numpy as np from scipy import stats as stats import mne from mne import spatial_tris_connectivity, grade_to_tris from mne.stats import spatio_temporal_cluster_test, summarize_clusters_stc from mne.datasets import sample print(__doc__) ############################################################################### # Set parameters data_path = sample.data_path() stc_fname = data_path + '/MEG/sample/sample_audvis-meg-lh.stc' subjects_dir = data_path + '/subjects' # Load stc to in common cortical space (fsaverage) stc = mne.read_source_estimate(stc_fname) stc.resample(50) stc = mne.morph_data('sample', 'fsaverage', stc, grade=5, smooth=20, subjects_dir=subjects_dir) n_vertices_fsave, n_times = stc.data.shape tstep = stc.tstep n_subjects1, n_subjects2 = 7, 9 print('Simulating data for %d and %d subjects.' % (n_subjects1, n_subjects2)) # Let's make sure our results replicate, so set the seed. 
np.random.seed(0) X1 = np.random.randn(n_vertices_fsave, n_times, n_subjects1) * 10 X2 = np.random.randn(n_vertices_fsave, n_times, n_subjects2) * 10 X1[:, :, :] += stc.data[:, :, np.newaxis] # make the activity bigger for the second set of subjects X2[:, :, :] += 3 * stc.data[:, :, np.newaxis] # We want to compare the overall activity levels for each subject X1 = np.abs(X1) # only magnitude X2 = np.abs(X2) # only magnitude ############################################################################### # Compute statistic # To use an algorithm optimized for spatio-temporal clustering, we # just pass the spatial connectivity matrix (instead of spatio-temporal) print('Computing connectivity.') connectivity = spatial_tris_connectivity(grade_to_tris(5)) # Note that X needs to be a list of multi-dimensional array of shape # samples (subjects_k) x time x space, so we permute dimensions X1 = np.transpose(X1, [2, 1, 0]) X2 = np.transpose(X2, [2, 1, 0]) X = [X1, X2] # Now let's actually do the
clustering. This can take a long time... # Here we set the threshold quite high to reduce computation. p_threshold = 0.0001 f_threshold = stats.distributions.f.ppf(1. - p_threshold / 2., n_subjects1 - 1, n_subjects2 - 1) print('Clustering.') T_obs, clusters, cluster_p_values, H0 = clu =\ spatio_temporal_cluster_test(X, connectivity=connectivity, n_jobs=2, threshol
d=f_threshold) # Now select the clusters that are sig. at p < 0.05 (note that this value # is multiple-comparisons corrected). good_cluster_inds = np.where(cluster_p_values < 0.05)[0] ############################################################################### # Visualize the clusters print('Visualizing clusters.') # Now let's build a convenient representation of each cluster, where each # cluster becomes a "time point" in the SourceEstimate fsave_vertices = [np.arange(10242), np.arange(10242)] stc_all_cluster_vis = summarize_clusters_stc(clu, tstep=tstep, vertices=fsave_vertices, subject='fsaverage') # Let's actually plot the first "time point" in the SourceEstimate, which # shows all the clusters, weighted by duration subjects_dir = op.join(data_path, 'subjects') # blue blobs are for condition A != condition B brain = stc_all_cluster_vis.plot('fsaverage', hemi='both', colormap='mne', subjects_dir=subjects_dir, time_label='Duration significant (ms)') brain.set_data_time_index(0) brain.show_view('lateral') brain.save_image('clusters.png')
# Download the Python helper library from twilio.com/docs/python/install
from twilio.rest.ip_messaging import TwilioIpMessagingClient

# Your Account Sid and Auth Token from twilio.com/user/account
# NOTE(review): placeholders — load real credentials from the environment
# rather than hard-coding them in source control.
account = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
token = "your_auth_token"
client = TwilioIpMessagingClient(account, token)

# Look up one messaging service and one of its channels by SID, then list
# and print every message in that channel.
service = client.services.get(sid="ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX")
channel = service.channels.get(sid="CHXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX")
messages = channel.messages.list()

for m in messages:
    print(m)
from typing import Optional, Tuple

import os
import sys

from distutils.version import LooseVersion
from version import PROVISION_VERSION
from scripts.lib.zulip_tools import get_dev_uuid_var_path


def get_major_version(v):
    # type: (str) -> int
    """Return the leading integer of a dotted version string."""
    return int(v.split('.')[0])

def get_version_file():
    # type: () -> str
    """Path of the file recording the last provisioned version."""
    uuid_var_path = get_dev_uuid_var_path()
    return os.path.join(uuid_var_path, 'provision_version')

PREAMBLE = '''
Before we run tests, we make sure your provisioning version
is correct by looking at var/provision_version, which is at
version %s, and we compare it to the version in
source control (version.py), which is %s.
'''

def preamble(version):
    # type: (str) -> str
    """Render PREAMBLE with the on-disk and source-control versions."""
    text = PREAMBLE % (version, PROVISION_VERSION)
    text += '\n'
    return text

NEED_TO_DOWNGRADE = '''
It looks like you checked out a branch that expects an older
version of dependencies than the version you provisioned last.
This may be ok, but it's likely that you either want to rebase
your branch on top of upstream/master or re-provision your VM.

Do this: `./tools/provision`
'''

NEED_TO_UPGRADE = '''
It looks like you checked out a branch that has added
dependencies beyond what you last provisioned.  Your command
is likely to fail until you add dependencies by provisioning.

Do this: `./tools/provision`
'''

def get_provisioning_status():
    # type: () -> Tuple[bool, Optional[str]]
    """Return (ok, message); message explains what to do when not ok."""
    version_file = get_version_file()
    if not os.path.exists(version_file):
        # If the developer doesn't have a version_file written by
        # a previous provision, then we don't do any safety checks
        # here on the assumption that the developer is managing
        # their own dependencies and not running provision.
        return True, None

    with open(version_file, 'r') as f:
        version = f.read().strip()

    # Normal path for people that provision--we're all good!
    if version == PROVISION_VERSION:
        return True, None

    # We may be more provisioned than the branch we just moved to.  As
    # long as the major version hasn't changed, then we should be ok.
    if LooseVersion(version) > LooseVersion(PROVISION_VERSION):
        if get_major_version(version) == get_major_version(PROVISION_VERSION):
            return True, None
        else:
            return False, preamble(version) + NEED_TO_DOWNGRADE

    return False, preamble(version) + NEED_TO_UPGRADE

def assert_provisioning_status_ok(force):
    # type: (bool) -> None
    """Exit with status 1 when provisioning is stale, unless *force*."""
    if not force:
        ok, msg = get_provisioning_status()
        if not ok:
            print(msg)
            print('If you really know what you are doing, use --force to run anyway.')
            sys.exit(1)
import json

import pytest

from indy import crypto, did, error

# Async tests for indy's crypto.auth_crypt; wallet/verkey/message values are
# supplied by pytest fixtures defined in the project's conftest.


@pytest.mark.asyncio
async def test_auth_crypt_works_for_created_key(wallet_handle, seed_my1, verkey_my2, message):
    # A key created in this wallet can be used as the sender verkey.
    verkey = await did.create_key(wallet_handle, json.dumps({'seed': seed_my1}))
    await crypto.auth_crypt(wallet_handle, verkey, verkey_my2, message)


@pytest.mark.asyncio
async def test_auth_crypt_works_for_unknown_sender_verkey(wallet_handle, verkey_my1, verkey_my2, message):
    # A sender verkey not present in the wallet must be rejected.
    with pytest.raises(error.WalletItemNotFound):
        await crypto.auth_crypt(wallet_handle, verkey_my1, verkey_my2, message)


@pytest.mark.asyncio
async def test_auth_crypt_works_for_invalid_handle(wallet_handle, verkey_my1, verkey_my2, message):
    # Any handle other than the opened one is invalid.
    with pytest.raises(error.WalletInvalidHandle):
        invalid_wallet_handle = wallet_handle + 1
        await crypto.auth_crypt(invalid_wallet_handle, verkey_my1, verkey_my2, message)


@pytest.mark.asyncio
async def test_auth_crypt_works_for_invalid_recipient_vk(wallet_handle, identity_trustee1, message):
    # A malformed recipient verkey fails structural validation.
    (_, key) = identity_trustee1
    with pytest.raises(error.CommonInvalidStructure):
        await crypto.auth_crypt(wallet_handle, key, 'CnEDk___MnmiHXEV1WFgbV___eYnPqs___TdcZaNhFVW', message)
#!/usr/bin/python3
"""Shared globals, argument parsing and logging setup for myservice2.

Imported by every module of the tool; module import has side effects
(parses sys.argv, creates ~/.myservice, configures the logger).
"""
#############
# this is to be leaded by every module.
#  I think
#import mysglobal as g
# args,loggerr @every module
#################
import logging
from logzero import setup_logger,LogFormatter,colors
import argparse
import os,sys
import json
from blessings import Terminal
import getpass # lockfile<= getuser
#from threading import Thread # thread: i need accesible thread
import uuid

DEBUG=True
config={}  # global config, but not sure
MYSEPATH=os.path.expanduser("~/.myservice")
I_AM_INFINITE=False
BOTTOMLINE_TEXT="no message"
t = Terminal()
ZMQ_REP_PORT=5678
RANDOM_STR = uuid.uuid4()
user_name = os.getenv('USER') # for /var/run/screen/S-user

####################################
#  PARSER ARG
######################################
parser=argparse.ArgumentParser(description="""
------------------------------------------------------------------
The tool to run services in userspace
""",usage="""
myservice2 [-d]  ...  shows the executables in ~/.myservice
myservice2 [-d] infinite ... run infinite (in terminal)
myservice test ... test is without a path inside ~/.myservice
myservice2 [-d] test enable ... introduces into .config.json
myservice2 [-d] test disable
myservice2 [-d] test never ... gray servicename and mode
myservice2 [-d] test undef
myservice2 [-d] test start
myservice2 [-d] test stop ... kills and makes UNDEF
myservice2 [-d] test perm4h ... run every 4 hours (it knows m,h,d)
myservice2 [-d] reconfig ... when MANUAL edit to .confg.json is done

script /dev/null ... do this when an ssh user without access to screen
------------------------------------------------------------------
VARIOUS TRICKS:
myservice2 ... looks for all executables; * ... already present in .config.json
 E ... atribute enable is there  + or - ... attribute enable is true or false
 p ... attribute perm is ON; also a,x
PATHS: when ~/.myservice/test/aaa
  myservice2 aaa enable : finds a path and adds into the .config.json
myservice2 infinite ... runs the table in the terminal (only 1 instance possible) OR connects to the screen -x myservice2_infinite
""", formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('-d','--debug', action='store_true' , help='')
#parser.add_argument('-s','--serfmsg', default='',nargs="+" , help='serf message to mmap') # list will come after
#parser.add_argument('count', action="store", type=int)
parser.add_argument('service', action="store", nargs="?") # nargs='+' :
parser.add_argument('command', action="store", nargs="?") # nargs='+'
#parser.add_argument('command', action="store")
# print("""
# USAGE CASES:
# ------------------------------------------------------------------
# ./myservice2.py -d infinite
# ./myservice.py test enable
# ------------------------------------------------------------------
# VARIOUS TRICKS:
#   subdir TTT
#     myservice2 TTT/aaa enable :  adds into the config
#     # this was the last time about PATH!; from now on:
#     myservice2 aaa sock
#  ./myservice2.py -s myservice aaa   # send command to mmap to test serf
#   # aaa must be status sock
# """)
args=parser.parse_args()

#=========== path must exist
# Ensure ~/.myservice exists before anything else touches it.
if not os.path.isdir( os.path.expanduser("~/.myservice") ):
    #print(" directory exists")
    #else: print(" DIR NOT EXISTS")
    os.mkdir( os.path.expanduser("~/.myservice") )

###########################################
# LOGGING - after AGR PARSE
########################################
log_format = '%(color)s%(levelname)1.1s... %(asctime)s%(end_color)s %(message)s' # i... format
LogFormatter.DEFAULT_COLORS[10] = colors.Fore.YELLOW ## debug level=10. default Cyan...
# Level 1 shows everything when -d is given; 11 hides debug otherwise.
loglevel=1 if args.debug==1 else 11 # all info, but not debug
formatter = LogFormatter(fmt=log_format,datefmt='%Y-%m-%d %H:%M:%S')
# Log file lives next to the tool's config dir, named after the script.
logfile=os.path.splitext( os.path.expanduser("~/.myservice/")+os.path.basename(sys.argv[0]) )[0]+'.log'
logger = setup_logger( name="main",logfile=logfile, level=loglevel,formatter=formatter )#to 1-50

# Per-user lock file guarding the single "infinite" instance.
lockfile="/tmp/"+"myservice2_"+getpass.getuser()+".lock"
lockfilepid=0
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import copy

import yaml

from mistral import exceptions as exc
from mistral.lang import parser as spec_parser
from mistral.tests.unit import base
from mistral import utils


class WorkflowSpecValidationTestCase(base.BaseTest):
    """Base test case that builds workflow DSL documents (from a blank
    scaffold or from resource files) and feeds them to the spec parser."""

    def __init__(self, *args, **kwargs):
        super(WorkflowSpecValidationTestCase, self).__init__(*args, **kwargs)

        # The relative resource path is ./mistral/tests/resources/workbook/v2.
        self._resource_path = 'workbook/v2'

        self._spec_parser = spec_parser.get_workflow_list_spec_from_yaml

        # Minimal valid workflow scaffold; tests merge their changes into it.
        self._dsl_blank = {
            'version': '2.0',
            'test': {
                'type': 'direct'
            }
        }

        # Canned task definitions attached when add_tasks=True.
        self._dsl_tasks = {
            'get': {
                'action': 'std.http',
                'input': {
                    'url': 'https://www.openstack.org'
                }
            },
            'echo': {
                'action': 'std.echo',
                'input': {
                    'output': 'This is a test.'
                }
            },
            'email': {
                'action': 'std.email',
                'input': {
                    'from_addr': 'mistral@example.com',
                    'to_addrs': ['admin@example.com'],
                    'subject': 'Test',
                    'body': 'This is a test.',
                    'smtp_server': 'localhost',
                    'smtp_password': 'password'
                }
            }
        }

    def _parse_dsl_spec(self, dsl_file=None, add_tasks=False,
                        changes=None, expect_error=False):
        """Build/load a DSL document, parse it, and either return the spec
        or assert that parsing raises DSLParsingException."""
        if dsl_file and add_tasks:
            raise Exception('The add_tasks option is not a valid '
                            'combination with the dsl_file option.')

        if dsl_file:
            # Load from a resource file, optionally merging in ``changes``.
            dsl_yaml = base.get_resource(self._resource_path + '/' + dsl_file)

            if changes:
                dsl_dict = yaml.safe_load(dsl_yaml)
                utils.merge_dicts(dsl_dict, changes)
                dsl_yaml = yaml.safe_dump(dsl_dict, default_flow_style=False)
        else:
            # Build from the blank scaffold (deep-copied so tests can't
            # pollute the shared template).
            dsl_dict = copy.deepcopy(self._dsl_blank)

            if add_tasks:
                dsl_dict['test']['tasks'] = copy.deepcopy(self._dsl_tasks)

            if changes:
                utils.merge_dicts(dsl_dict, changes)

            dsl_yaml = yaml.safe_dump(dsl_dict, default_flow_style=False)

        if not expect_error:
            return self._spec_parser(dsl_yaml)
        else:
            return self.assertRaises(
                exc.DSLParsingException,
                self._spec_parser,
                dsl_yaml
            )


class WorkbookSpecValidationTestCase(WorkflowSpecValidationTestCase):
    """Same harness, but parsing whole workbooks instead of workflow lists."""

    def __init__(self, *args, **kwargs):
        super(WorkbookSpecValidationTestCase, self).__init__(*args, **kwargs)

        self._spec_parser = spec_parser.get_workbook_spec_from_yaml

        self._dsl_blank = {
            'version': '2.0',
            'name': 'test_wb'
        }

    def _parse_dsl_spec(self, dsl_file=None, add_tasks=False,
                        changes=None, expect_error=False):
        # NOTE(review): ``add_tasks`` is accepted but the super() call always
        # passes ``add_tasks=False`` — confirm whether this is intentional
        # (the workbook scaffold has no 'test' workflow to attach tasks to)
        # or a dropped parameter.
        return super(WorkbookSpecValidationTestCase, self)._parse_dsl_spec(
            dsl_file=dsl_file, add_tasks=False, changes=changes,
            expect_error=expect_error)
import requests
import datetime
import calendar


class DeskTime(object):
    """Minimal client for the DeskTime v2 JSON API."""

    MAIN_URL = 'https://desktime.com/api/2/json/?{params}'

    def __init__(self, app_key, username, password):
        """Authorize immediately; raise if the credentials are rejected."""
        self.api_key = self._login(app_key, username, password)
        if self.api_key is None:
            raise Exception("Authorization error")

    def _login(self, app_key, username, password):
        """Return the API key on success, or None on any API-reported error."""
        auth = 'appkey={appkey}&action={action}&email={email}&password={password}'
        auth = auth.format(appkey=app_key,
                           action='authorize',
                           email=username,
                           password=password)
        auth_url = self.MAIN_URL.format(params=auth)
        res = requests.get(auth_url)
        data = res.json()
        if not data.get(u'error', None):
            return data.get('api_key', None)
        return None

    def getAllDataForDate(self, date=None):
        """Fetch all employee data for one date (defaults to today).

        BUG FIX: the original signature used
        date=datetime.datetime.now().date() as the default, which Python
        evaluates ONCE at class-definition time, so a long-running process
        kept querying the start-up date forever. The default is now resolved
        at call time.
        """
        if date is None:
            date = datetime.datetime.now().date()
        employees = 'apikey={apikey}&action=employees&date={date}'
        employees = employees.format(apikey=self.api_key,
                                     action='employees',
                                     date=date.isoformat())
        url = self.MAIN_URL.format(params=employees)
        res = requests.get(url)
        data = res.json()
        if not data.get('error', None):
            return data
        return None

    def getMonth(self, year, month, with_weekends=False):
        """Collect per-day payloads for a month, up to today.

        Returns a list of day payloads, or None when the requested month lies
        entirely in the future. Weekends are skipped unless with_weekends.
        """
        monthrange = calendar.monthrange(year, month)
        today = datetime.datetime.now().date()
        data = []
        resdata = {}
        for dayindex in range(monthrange[1]):
            day = dayindex + 1
            date = datetime.date(year, month, day)
            # Skip the remaining (future) days of the current month, but bail
            # out entirely when the requested month is in the future.
            if date > today and date.year == today.year and today.month == date.month:
                continue
            elif date > today:
                return None
            if not with_weekends and date.weekday() in (5, 6):
                continue
            data.append(self.getAllDataForDate(date))
        for elem in data:
            # BUG FIX: getAllDataForDate returns None on API errors; the
            # original crashed here with AttributeError on such days.
            if elem is None:
                continue
            resdata[elem.get('date')] = elem.get('employees')
        # NOTE(review): resdata is built but never returned; the list is kept
        # as the return value for backward compatibility — confirm intent.
        return data

    def getEmployee(self, employee_id):
        """Not implemented yet."""
        raise NotImplementedError
ER_ZONE, task_id="id" ) mock_hook.return_value.create_instance.side_effect = mock.Mock( side_effect=google.api_core.exceptions.GoogleAPICallError('error')) with self.assertRaises(google.api_core.exceptions.GoogleAPICallError): op.execute(None) mock_hook.assert_called_once_with() mock_hook.return_value.create_instance.assert_called_once_with( cluster_nodes=None, cluster_storage_type=None, instance_display_name=None, instance_id=INSTANCE_ID, instance_labels=None, instance_type=None, main_cluster_id=CLUSTER_ID, main_cluster_zone=CLUSTER_ZONE, project_id=PROJECT_ID, replica_cluster_id=None, replica_cluster_zone=None, timeout=None ) class BigtableClusterUpdateTest(unittest.TestCase): @parameterized.expand([ ('instance_id', PROJECT_ID, '', CLUSTER_ID, NODES), ('cluster_id', PROJECT_ID, INSTANCE_ID, '', NODES), ('nodes', PROJECT_ID, INSTANCE_ID, CLUSTER_ID, ''), ], testcase_func_name=lambda f, n, p: 'test_empty_attribute.empty_' + p.args[0]) @mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook') def test_empty_attribute(self, missing_attribute, project_id, instance_id, cluster_id, nodes, mock_hook): with self.assertRaises(AirflowException) as e: BigtableClusterUpdateOperator( project_id=project_id, instance_id=instance_id, cluster_id=cluster_id, nodes=nodes, task_id="id" ) err = e.exception self.assertEqual(str(err), 'Empty parameter: {}'.format(missing_attribute)) mock_hook.assert_not_called() @mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook') def test_updating_cluster_but_instance_does_not_exists(self, mock_hook): mock_hook.return_value.get_instance.return_value = None with self.assertRaises(AirflowException) as e: op = BigtableClusterUpdateOperator( project_id=PROJECT_ID, instance_id=INSTANCE_ID, cluster_id=CLUSTER_ID, nodes=NODES, task_id="id" ) op.execute(None) err = e.exception self.assertEqual(str(err), "Dependency: instance '{}' does not exist.".format( INSTANCE_ID)) mock_hook.assert_called_once_with() 
mock_hook.return_value.update_cluster.assert_not_called() @mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook') def test_updating_cluster_but_instance_does_not_exists_empty_project_id(self, mock_hook): mock_hook.return_value.get_instance.retu
rn_value = None with self.assertRaises(AirflowException) as e: op = BigtableClusterUpdateOperator( instance_id=INSTANCE_ID, cluster_id=CLUSTER_ID, nodes=NODES, task_id="id" ) op.execute(None) err = e.exception self.assertEqual(str(err), "Dependency: instance '{}' does not exist.".format( INSTANCE_ID)) mock_hook.assert_ca
lled_once_with() mock_hook.return_value.update_cluster.assert_not_called() @mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook') def test_updating_cluster_that_does_not_exists(self, mock_hook): instance = mock_hook.return_value.get_instance.return_value = mock.Mock(Instance) mock_hook.return_value.update_cluster.side_effect = mock.Mock( side_effect=google.api_core.exceptions.NotFound("Cluster not found.")) with self.assertRaises(AirflowException) as e: op = BigtableClusterUpdateOperator( project_id=PROJECT_ID, instance_id=INSTANCE_ID, cluster_id=CLUSTER_ID, nodes=NODES, task_id="id" ) op.execute(None) err = e.exception self.assertEqual( str(err), "Dependency: cluster '{}' does not exist for instance '{}'.".format( CLUSTER_ID, INSTANCE_ID) ) mock_hook.assert_called_once_with() mock_hook.return_value.update_cluster.assert_called_once_with( instance=instance, cluster_id=CLUSTER_ID, nodes=NODES) @mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook') def test_updating_cluster_that_does_not_exists_empty_project_id(self, mock_hook): instance = mock_hook.return_value.get_instance.return_value = mock.Mock(Instance) mock_hook.return_value.update_cluster.side_effect = mock.Mock( side_effect=google.api_core.exceptions.NotFound("Cluster not found.")) with self.assertRaises(AirflowException) as e: op = BigtableClusterUpdateOperator( instance_id=INSTANCE_ID, cluster_id=CLUSTER_ID, nodes=NODES, task_id="id" ) op.execute(None) err = e.exception self.assertEqual( str(err), "Dependency: cluster '{}' does not exist for instance '{}'.".format( CLUSTER_ID, INSTANCE_ID) ) mock_hook.assert_called_once_with() mock_hook.return_value.update_cluster.assert_called_once_with( instance=instance, cluster_id=CLUSTER_ID, nodes=NODES) @mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook') def test_different_error_reraised(self, mock_hook): op = BigtableClusterUpdateOperator( project_id=PROJECT_ID, instance_id=INSTANCE_ID, 
cluster_id=CLUSTER_ID, nodes=NODES, task_id="id" ) instance = mock_hook.return_value.get_instance.return_value = mock.Mock(Instance) mock_hook.return_value.update_cluster.side_effect = mock.Mock( side_effect=google.api_core.exceptions.GoogleAPICallError('error')) with self.assertRaises(google.api_core.exceptions.GoogleAPICallError): op.execute(None) mock_hook.assert_called_once_with() mock_hook.return_value.update_cluster.assert_called_once_with( instance=instance, cluster_id=CLUSTER_ID, nodes=NODES) class BigtableInstanceDeleteTest(unittest.TestCase): @mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook') def test_delete_execute(self, mock_hook): op = BigtableInstanceDeleteOperator( project_id=PROJECT_ID, instance_id=INSTANCE_ID, task_id="id" ) op.execute(None) mock_hook.assert_called_once_with() mock_hook.return_value.delete_instance.assert_called_once_with( project_id=PROJECT_ID, instance_id=INSTANCE_ID) @mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook') def test_delete_execute_empty_project_id(self, mock_hook): op = BigtableInstanceDeleteOperator( instance_id=INSTANCE_ID, task_id="id" ) op.execute(None) mock_hook.assert_called_once_with() mock_hook.return_value.delete_instance.assert_called_once_with( project_id=None, instance_id=INSTANCE_ID) @parameterized.expand([ ('instance_id', PROJECT_ID, ''), ], testcase_func_name=lambda f, n, p: 'test_empty_attribute.empty_' + p.args[0]) @mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook') def test_empty_attribute(self, missing_attribute, project_id, instance_id, mock_hook): with self.assertRaises(AirflowException) as e: BigtableInstanceDeleteOperator( project_id=project_id, instance_id=instance_id, task_id="id" ) err = e.exception
changeme if issparse(A): A = np.array(A.todense()) else: A = np.array(A) d1, d2 = np.shape(Cn) d, nr = np.shape(A) if max_number is None: max_number = nr x, y = np.mgrid[0:d1:1, 0:d2:1] pl.imshow(Cn, interpolation=None, cmap=cmap) cm = com(A, d1, d2) Bmat = np.zeros((np.minimum(nr, max_number), d1, d2)) for i in range(np.minimum(nr, max_number)): indx = np.argsort(A[:, i], axis=None)[::-1] cumEn = np.cumsum(A[:, i].flatten()[indx]**2) cumEn /= cumEn[-1] Bvec = np.zeros(d) Bvec[indx] = cumEn Bmat[i] = np.reshape(Bvec, np.shape(Cn), order='F') T = np.shape(Y)[-1] pl.close() fig = pl.figure() ax = pl.gca() ax.imshow(Cn, interpolation=None, cmap=cmap, vmin=np.percentile(Cn[~np.isnan(Cn)], 1), vmax=np.percentile(Cn[~np.isnan(Cn)], 99)) for i in range(np.minimum(nr, max_number)): pl.contour(y, x, Bmat[i], [thr]) if display_numbers: for i in range(np.minimum(nr, max_number)): ax.text(cm[i, 1], cm[i, 0], str(i + 1)) A3 = np.reshape(A, (d1, d2, nr), order='F') while True: pts = fig.ginput(1, timeout=0) if pts != []: print(pts) xx, yy = np.round(pts[0]).astype(np.int) coords_y = np.array(list(range(yy - dy, yy + dy + 1))) coords_x = np.array(list(range(xx - dx, xx + dx + 1))) coords_y = coords_y[(coords_y >= 0) & (coords_y < d1)] coords_x = coords_x[(coords_x >= 0) & (coords_x < d2)] a3_tiny = A3[coords_y[0]:coords_y[-1] + 1, coords_x[0]:coords_x[-1] + 1, :] y3_tiny = Y[coords_y[0]:coords_y[-1] + 1, coords_x[0]:coords_x[-1] + 1, :] dy_sz, dx_sz = np.shape(a3_tiny)[:-1] y2_tiny = np.reshape(y3_tiny, (dx_sz * dy_sz, T), order='F') a2_tiny = np.reshape(a3_tiny, (dx_sz * dy_sz, nr), order='F') y2_res = y2_tiny - a2_tiny.dot(C) y3_res = np.reshape(y2_res, (dy_sz, dx_sz, T), order='F') a__, c__, center__, b_in__, f_in__ = greedyROI( y3_res, nr=1, gSig=[np.floor(old_div(dx_sz, 2)), np.floor(old_div(dy_sz, 2))], gSiz=[dx_sz, dy_sz]) a_f = np.zeros((d, 1)) idxs = np.meshgrid(coords_y, coords_x) a_f[np.ravel_multi_index( idxs, (d1, d2), order='F').flatten()] = a__ A = 
np.concatenate([A, a_f], axis=1) C = np.concatenate([C, c__], axis=0) indx = np.argsort(a_f, axis=None)[::-1] cumEn = np.cumsum(a_f.flatten()[indx]**2) cumEn /= cumEn[-1] Bvec = np.zeros(d) Bvec[indx] = cumEn bmat = np.reshape(Bvec, np.shape(Cn), order='F') pl.contour(y, x, bmat, [thr]) pl.pause(.01) elif pts == []: break nr += 1 A3 = np.reshape(A, (d1, d2, nr), order='F') return A, C def app_vertex_cover(A): """ Finds an approximate vertex cover for a symmetric graph with adjacency matrix A. Args: A: boolean 2d array (K x K) Adjacency matrix. A is boolean with diagonal set to 0 Returns: L: A vertex cover of A Authors: Eftychios A. Pnevmatikakis, Simons Foundation, 2015 """ L = [] while A.any(): nz = np.nonzero(A)[0] # find non-zero edges u = nz[np.random.randint(0, len(nz))] A[u, :] = False A[:, u] = False L.append(u) return np.asarray(L) def update_order(A, new_a=None, prev_list=None, method='greedy'): '''Determines the update order of the temporal components given the spatial components by creating a nest of random approximate vertex covers Args: A: np.ndarray matrix of spatial components (d x K) new_a: sparse array spatial component that is added, in order to efficiently update the orders in online scenarios prev_list: list of list orders from previous iteration, you need to pass if new_a is not None Returns: O: list of sets list of subsets of components. The components of each subset can be updated in parallel lo: list length of each subset Written by Eftychios A. 
Pnevmatikakis, Simons Foundation, 2015 ''' K = np.shape(A)[-1] if new_a is None and prev_list is None: if method is 'greedy': prev_list, count_list = update_order_greedy(A, flag_AA=False) else: prev_list, count_list = update_order_random(A, flag_AA=False) return prev_list, count_list else: if new_a is None or prev_list is None: raise Exception( 'In the online update order you need to provide both new_a and prev_list') counter = 0 AA = A.T.dot(new_a) for group in prev_list: if AA[list(group)].sum() == 0: group.append(K) counter += 1 break if counter == 0: if prev_list is not None: prev_list = list(prev_list) prev_list.append([K]) count_list = [len(gr) for gr in prev_list] return prev_list, count_list def order_components(A, C): """Order components based on their maximum temporal value and size Args: A: sparse matrix (d x K) spatial components C: matrix or np.ndarray (K x T) temporal components Returns: A_or: np.ndarray ordered spatial components C_or: np.ndarray ordered temporal components srt: np.ndarray sorting mapping """ A = np.array(A.todense()) nA2 = np.sqrt(np.sum(A**2, axis=0)) K = len(nA2) A = np.array(np.matrix(A) * spdiags(old_div(1, nA2), 0, K, K)) nA4 = np.sum(A**4, axis=0)**0.25 C = np.array(spdiags(nA2, 0, K, K) * np.matrix(C)) mC = np.ndarray.max(np.array(C), axis=1) srt = np.argsort(nA4 * mC)[::-1] A_or = A[:, srt] * spdiags(nA2[srt], 0, K, K) C_or = spdiags(old_div(1., nA2[srt]), 0, K, K) * (C[srt, :]) return A_or, C_or, srt def update_order_random(A, flag_AA=True): """Determies the update order of temporal components using randomized partitions of non-overlapping components """ K = np.shape(A)[-1] if flag_AA: AA = A.copy() else: AA = A.T.dot(A) AA.setdiag(0) F = (AA) > 0 F = F.toarray() rem_ind = np.arange(K) O = [] lo = [] while len(rem_ind) > 0: L = np.sort(app_vertex_cover(F[rem_ind, :][:, rem_ind])) if L.size: ord_ind = set(rem_ind) - set(rem_ind[L]) rem_ind = rem_ind[L] else: ord_ind = set(rem_ind) rem_ind = [] O.append(ord_ind) 
lo.append(len(ord_ind)) return O[::-1], lo[::-1] def update_order_greedy(A, flag_AA=True): """Determines the update order of the temporal components this, given the spatial components using a greedy method Basically we can update the components that are not overlapping, in parallel Args: A: sparse crc matrix matrix of spatial components (d x K) OR: A.T.dot(A) matrix (d x d) if fl
ag_AA = true flag_AA: boolean (default true)
Returns: parllcomp: list of sets list of subsets of components. The components of each subset can be updated in parallel len_parrllcomp: list length of each subset Author: Eftychios A. Pnevmatikakis, Simons Foundation, 2017 """ K = np.shape(A)[-1] parllcomp:List = [] for i in range(K): new_list = True for ls in parllcomp: if flag_AA: if A[i, ls].nnz == 0: ls.append(i) new_list = False break else: if (A[:, i].T.dot(A[:, ls])).nnz == 0: ls.append(i)
# NOTE(review): this fragment re-defines f() on every one of 1,000,000 loop
# iterations; each definition produces an identical function object, so this
# looks like a parser/benchmark stress fragment rather than production code —
# confirm intent before relying on it.
for i in range(1000000):
    def f(x, y=1, *args, **kw):
        pass
# pylint: skip-file


class GCPResource(object):
    """Immutable value object describing a single GCP resource."""

    def __init__(self, rname, rtype, project, zone):
        """Capture the resource's identifying attributes."""
        self._name = rname
        self._type = rtype
        self._project = project
        self._zone = zone

    @property
    def name(self):
        """Resource name."""
        return self._name

    @property
    def type(self):
        """Resource type."""
        return self._type

    @property
    def project(self):
        """Owning GCP project."""
        return self._project

    @property
    def zone(self):
        """Zone the resource lives in."""
        return self._zone
# Module metadata (conventional dunder assignments).
__author__ = "Jacob Lydon"
__copyright__ = "Copyright 2017"
__credits__ = []
__license__ = "GPLv3"
__version__ = "0.1"
__maintainer__ = "Jacob Lydon"
__email__ = "jlydon001@regis.edu"
__status__ = "Development"
# /ciscripts/chec
k/python/__init__.py # # Module loader file for /ciscripts/check/python. # # See /LICENCE.md for Copyright
information """Module loader file for /ciscripts/check/python."""
# Build Code
import os
import subprocess
import re


class GCC:
    """Build plugin that drives `make` and collects notable output lines."""

    def __init__(self):
        # Pre-compiled patterns for make's directory-change messages.
        # (The original compiled these but then duplicated the same substring
        # tests with line.find(); they are now actually used.)
        self.enter_match = re.compile(r'Entering directory')
        self.leave_match = re.compile(r'Leaving directory')

    def can_build(self, dirname, ext):
        """Return True if ext is a C/C++ source/header and dirname has a
        makefile; remembers the makefile directory on success."""
        if ext in (".c", ".h", ".cpp", ".hpp"):
            files = [f.lower() for f in os.listdir(dirname)]
            if "makefile" in files:
                self.makefile_dir = dirname
                return True
        return False

    def run(self, action, output):
        """Run make (optionally with a target), stream all output to `output`,
        and return the lines worth surfacing (includes, dir changes, warnings).

        BUG FIX: the original wrote warning lines to `output` a second time
        inside the warning branch, duplicating them in the build log.
        """
        args = ["make"]
        if action:
            args.append(action)
        print(args)
        proc = subprocess.Popen(args, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        error_lines = []
        while True:
            line = proc.stdout.readline().decode("utf-8")
            if len(line) == 0:
                break
            output.write(line)
            if (line.startswith("In file included from")
                    or self.enter_match.search(line)
                    or self.leave_match.search(line)
                    or "warning:" in line):
                error_lines.append(line)
        return error_lines


def get_plugin():
    """Entry point for the plugin loader."""
    return GCC()
import maya.cmds as cmds
from . import renamer_settings as settings


class FieldReplacer(object):
    """Renaming helpers for the jsRenamer UI.

    Names are expected to follow a 5-field underscore template:
    material_position_body_number_suffix (default_C_default_0000_???).
    Each replace* method reads its new value from a named UI control and
    renames the current Maya selection in place.
    """

    def __init__(self):
        print 'Initializing jsRenamer FieldReplacer...'

    def checkTemplate(self,node):
        """Return True when node (full name) splits into exactly 5
        underscore-separated fields, i.e. matches the naming template."""
        splitNode = node.split('_')
        # Check for the correct amount of fields.
        if len(splitNode) == 5:
            return True
        else:
            return False

    ##########################################
    #####REPLACE FIELD########################
    ##########################################
    def replaceMaterial(self, args=None):
        """Replace field 1 (material/prefix) from the 'materialField' text
        box on every selected node matching the template."""
        ReplaceSel = cmds.ls(sl=1)
        prefixReplace = cmds.textField('materialField',query=True,tx=1)
        prefixReplace= prefixReplace
        if prefixReplace == '':
            # Empty field: nothing to do.
            pass
        else:
            for each in ReplaceSel:
                if self.checkTemplate(each) == True:
                    # Use only the short name when a DAG path is selected.
                    if '|' in each:
                        replacerOldName=each.split('|')[-1]
                    else:
                        replacerOldName = each
                    prefixSplit = replacerOldName.split('_',1)
                    prefixReplaceName = prefixReplace+ '_' +str(prefixSplit[1])
                    cmds.rename(each,prefixReplaceName)
                else:
                    cmds.error(each+' does not match naming Template (default_C_default_0000_???)')

    def replacePosition(self, args=None):
        """Replace field 2 (position) from the 'positionField' option menu."""
        ReplaceSel = cmds.ls(sl=1)
        positionReplace = cmds.optionMenu('positionField',query=True,v=1)
        for each in ReplaceSel:
            if self.checkTemplate(each) == True:
                if '|' in each:
                    replacerOldName=each.split('|')[-1]
                else:
                    replacerOldName = each
                positionSplit = replacerOldName.split('_')
                newPosName = positionSplit[0]+'_'+positionReplace+'_'+positionSplit[2]+'_'+positionSplit[3]+'_'+positionSplit[4]
                cmds.rename(each,newPosName)
            else:
                cmds.error(each+' does not match naming Template (default_C_default_0000_???)')

    def replaceBody(self, args=None):
        """Replace field 3 (body) from the 'bodyField' text box."""
        ReplaceSel = cmds.ls(sl=1)
        bodyReplace = cmds.textField('bodyField',query=True,tx=1)
        for each in ReplaceSel:
            if self.checkTemplate(each) == True:
                if '|' in each:
                    replacerOldName=each.split('|')[-1]
                else:
                    replacerOldName = each
                bodySplit = replacerOldName.split('_')
                newBodyName = bodySplit[0]+'_'+bodySplit[1]+'_'+bodyReplace+'_'+bodySplit[3]+'_'+bodySplit[4]
                cmds.rename(each,newBodyName)
            else:
                cmds.error(each+' does not match naming Template (default_C_default_0000_???)')

    ###Replace GEO_Suffix
    def replaceGeoSuffix(self, args=None):
        """Replace the last field (geo suffix) from the 'suffixField'
        option menu; only template-conforming names are touched."""
        ReplaceSel = cmds.ls(sl=1)
        suffixReplace = cmds.optionMenu('suffixField',query=True,v=1)
        for each in ReplaceSel:
            if self.checkTemplate(each) == True:
                if '|' in each:
                    replacerOldName=each.split('|')[-1]
                else:
                    replacerOldName = each
                suffixSplit = replacerOldName.rsplit('_',1)
                suffixReplaceName = suffixSplit[0] + '_' +suffixReplace
                cmds.rename(each,suffixReplaceName)
            else:
                cmds.error(each+' does not match naming Template (default_C_default_0000_???)')

    ###Replacer
    def replacer(self, args=None):
        """Free-form find/replace on selected node names using the
        'replacerOldField' / 'replacerNewField' text boxes."""
        replacerSel = cmds.ls(sl=1)
        replacerOld = cmds.textField('replacerOldField',query = True,text=True)
        replacerNew = cmds.textField('replacerNewField',query = True,text=True)
        for each in replacerSel:
            if '|' in each:
                replacerOldName=each.split('|')[-1]
            else:
                replacerOldName = each
            replacerNewName = replacerOldName.replace(replacerOld,replacerNew)
            print replacerNewName
            cmds.rename(each, replacerNewName)

    ###PrefixAdd
    def addPrefix(self, args=None):
        """Prepend the 'addPrefixField' text to every selected name."""
        prefixSel = cmds.ls(sl=1)
        prefixAddition = cmds.textField('addPrefixField',query = True,text=True)
        for each in prefixSel:
            newPrefixName = prefixAddition+each
            print newPrefixName
            cmds.rename(each,newPrefixName)

    ###Suffix Add
    def addSuffix(self, args=None):
        """Append the 'addSuffixField' text to every selected name."""
        suffixSel = cmds.ls(sl=1)
        suffixAddition = cmds.textField('addSuffixField',query = True,text=True)
        for each in suffixSel:
            newSuffixName = each+suffixAddition
            print newSuffixName
            cmds.rename(each,newSuffixName)

    ###Replace Prefix
    def replacePrefix(self, args=None):
        """Replace everything before the first underscore with the
        'replacePrefixField' text; failures are silently skipped.
        NOTE(review): the bare except hides errors for names without an
        underscore — presumably intentional best-effort; confirm."""
        prefixReplaceSel = cmds.ls(sl=1)
        prefixReplace = cmds.textField('replacePrefixField',query = True,text=True)
        if prefixReplace == '':
            pass
        else:
            for each in prefixReplaceSel:
                try:
                    if '|' in each:
                        replacerOldName=each.split('|')[-1]
                    else:
                        replacerOldName = each
                    prefixSplit = replacerOldName.split('_',1)
                    prefixReplaceName = prefixReplace+ '_' +str(prefixSplit[1])
                    print prefixReplaceName
                    cmds.rename(each,prefixReplaceName)
                except:
                    pass

    ###Replace Geo Suffix
    def replaceSuffix(self, args=None):
        """Replace everything after the last underscore with the
        'replaceSuffixField' text; failures are silently skipped."""
        suffixReplaceSel = cmds.ls(sl=1)
        suffixReplace = cmds.textField('replaceSuffixField',query = True,text=True)
        if suffixReplace == '':
            pass
        else:
            for each in suffixReplaceSel:
                try:
                    if '|' in each:
                        replacerOldName=each.split('|')[-1]
                    else:
                        replacerOldName = each
                    suffixSplit = replacerOldName.rsplit('_',1)
                    suffixReplaceName = suffixSplit[0] + '_' +suffixReplace
                    print suffixReplaceName
                    cmds.rename(each,suffixReplaceName)
                except:
                    pass
async def post(self, request): """Trigger a Google Actions sync.""" hass = request.app["hass"] cloud: Cloud = hass.data[DOMAIN] gconf = await cloud.client.get_google_config() status = await gconf.async_sync_entities(gconf.agent_user_id) return self.json({}, status_code=status) class CloudLoginView(HomeAssistantView): """Login to Home Assistant cloud.""" url = "/api/cloud/login" name = "api:cloud:login" @_handle_cloud_errors @RequestDataValidator( vol.Schema({vol.Required("email"): str, vol.Required("password"): str}) ) async def post(self, request, data): """Handle login request.""" hass = request.app["hass"] cloud = hass.data[DOMAIN] await cloud.login(data["email"], data["password"]) return self.json({"success": Tru
e}) class CloudLogoutView(HomeAssistantView): """Log out of the Home Assistant cloud.""" url = "/api/cloud/logout" name = "api:cloud:logout" @_handle_cloud_errors async def post(self, request): """Handle logout request.""
" hass = request.app["hass"] cloud = hass.data[DOMAIN] with async_timeout.timeout(REQUEST_TIMEOUT): await cloud.logout() return self.json_message("ok") class CloudRegisterView(HomeAssistantView): """Register on the Home Assistant cloud.""" url = "/api/cloud/register" name = "api:cloud:register" @_handle_cloud_errors @RequestDataValidator( vol.Schema( { vol.Required("email"): str, vol.Required("password"): vol.All(str, vol.Length(min=6)), } ) ) async def post(self, request, data): """Handle registration request.""" hass = request.app["hass"] cloud = hass.data[DOMAIN] with async_timeout.timeout(REQUEST_TIMEOUT): await hass.async_add_job( cloud.auth.register, data["email"], data["password"] ) return self.json_message("ok") class CloudResendConfirmView(HomeAssistantView): """Resend email confirmation code.""" url = "/api/cloud/resend_confirm" name = "api:cloud:resend_confirm" @_handle_cloud_errors @RequestDataValidator(vol.Schema({vol.Required("email"): str})) async def post(self, request, data): """Handle resending confirm email code request.""" hass = request.app["hass"] cloud = hass.data[DOMAIN] with async_timeout.timeout(REQUEST_TIMEOUT): await hass.async_add_job(cloud.auth.resend_email_confirm, data["email"]) return self.json_message("ok") class CloudForgotPasswordView(HomeAssistantView): """View to start Forgot Password flow..""" url = "/api/cloud/forgot_password" name = "api:cloud:forgot_password" @_handle_cloud_errors @RequestDataValidator(vol.Schema({vol.Required("email"): str})) async def post(self, request, data): """Handle forgot password request.""" hass = request.app["hass"] cloud = hass.data[DOMAIN] with async_timeout.timeout(REQUEST_TIMEOUT): await hass.async_add_job(cloud.auth.forgot_password, data["email"]) return self.json_message("ok") @callback def websocket_cloud_status(hass, connection, msg): """Handle request for account info. Async friendly. 
""" cloud = hass.data[DOMAIN] connection.send_message( websocket_api.result_message(msg["id"], _account_data(cloud)) ) def _require_cloud_login(handler): """Websocket decorator that requires cloud to be logged in.""" @wraps(handler) def with_cloud_auth(hass, connection, msg): """Require to be logged into the cloud.""" cloud = hass.data[DOMAIN] if not cloud.is_logged_in: connection.send_message( websocket_api.error_message( msg["id"], "not_logged_in", "You need to be logged in to the cloud." ) ) return handler(hass, connection, msg) return with_cloud_auth @_require_cloud_login @websocket_api.async_response async def websocket_subscription(hass, connection, msg): """Handle request for account info.""" cloud = hass.data[DOMAIN] with async_timeout.timeout(REQUEST_TIMEOUT): response = await cloud.fetch_subscription_info() if response.status != 200: connection.send_message( websocket_api.error_message( msg["id"], "request_failed", "Failed to request subscription" ) ) data = await response.json() # Check if a user is subscribed but local info is outdated # In that case, let's refresh and reconnect if data.get("provider") and not cloud.is_connected: _LOGGER.debug("Found disconnected account with valid subscriotion, connecting") await hass.async_add_executor_job(cloud.auth.renew_access_token) # Cancel reconnect in progress if cloud.iot.state != STATE_DISCONNECTED: await cloud.iot.disconnect() hass.async_create_task(cloud.iot.connect()) connection.send_message(websocket_api.result_message(msg["id"], data)) @_require_cloud_login @websocket_api.async_response @websocket_api.websocket_command( { vol.Required("type"): "cloud/update_prefs", vol.Optional(PREF_ENABLE_GOOGLE): bool, vol.Optional(PREF_ENABLE_ALEXA): bool, vol.Optional(PREF_ALEXA_REPORT_STATE): bool, vol.Optional(PREF_GOOGLE_REPORT_STATE): bool, vol.Optional(PREF_GOOGLE_SECURE_DEVICES_PIN): vol.Any(None, str), } ) async def websocket_update_prefs(hass, connection, msg): """Handle request for account info.""" cloud = 
hass.data[DOMAIN] changes = dict(msg) changes.pop("id") changes.pop("type") # If we turn alexa linking on, validate that we can fetch access token if changes.get(PREF_ALEXA_REPORT_STATE): try: with async_timeout.timeout(10): await cloud.client.alexa_config.async_get_access_token() except asyncio.TimeoutError: connection.send_error( msg["id"], "alexa_timeout", "Timeout validating Alexa access token." ) return except (alexa_errors.NoTokenAvailable, RequireRelink): connection.send_error( msg["id"], "alexa_relink", "Please go to the Alexa app and re-link the Home Assistant " "skill and then try to enable state reporting.", ) return await cloud.client.prefs.async_update(**changes) connection.send_message(websocket_api.result_message(msg["id"])) @_require_cloud_login @websocket_api.async_response @_ws_handle_cloud_errors async def websocket_hook_create(hass, connection, msg): """Handle request for account info.""" cloud = hass.data[DOMAIN] hook = await cloud.cloudhooks.async_create(msg["webhook_id"], False) connection.send_message(websocket_api.result_message(msg["id"], hook)) @_require_cloud_login @websocket_api.async_response @_ws_handle_cloud_errors async def websocket_hook_delete(hass, connection, msg): """Handle request for account info.""" cloud = hass.data[DOMAIN] await cloud.cloudhooks.async_delete(msg["webhook_id"]) connection.send_message(websocket_api.result_message(msg["id"])) def _account_data(cloud): """Generate the auth data JSON response.""" if not cloud.is_logged_in: return {"logged_in": False, "cloud": STATE_DISCONNECTED} claims = cloud.claims client = cloud.client remote = cloud.remote # Load remote certificate if remote.certificate: certificate = attr.asdict(remote.certificate) else: certificate = None return { "logged_in": True, "email": claims["email"], "cloud": cloud.iot.state, "prefs": client.prefs.as_dict(), "google_entities": client.google_user_config["filter"].config, "alexa_entities": client.alexa_user_config["filter"].config,
import web

# NOTE(review): database credentials are hard-coded in source; move them to
# configuration or environment variables.
db = web.database(dbn='mysql', db='googlemodules', user='ale', passwd='3babes')

# Prints the screenshot URL prefix once per row of `function`.
# NOTE(review): the printed string is constant — presumably the intent was to
# append each row's screenshot value; confirm against original behavior.
for url in db.select('function', what='screenshot'):
    print 'http://www.googlemodules.com/image/screenshot'
class PSFModel(object):
    """Bundles microscope/PSF parameters and creates a PSF image by running
    an ImageJ/SciJava command synchronously.

    All constructor arguments are stored as same-named attributes and passed
    verbatim to the PSF command in CreatePsf.
    """

    def __init__(self, scopeType, psfModel, xySpace, zSpace, emissionWavelength,
                 numericalAperture, designImmersionOilRefractiveIndex,
                 designSpecimenLayerRefractiveIndex,
                 actualImmersionOilRefractiveIndex,
                 actualSpecimenLayerRefractiveIndex,
                 actualPointSourceDepthInSpecimenLayer, homeDirectory):
        self.scopeType = scopeType
        self.psfModel = psfModel
        self.xySpace = xySpace
        self.zSpace = zSpace
        self.emissionWavelength = emissionWavelength
        self.numericalAperture = numericalAperture
        self.designImmersionOilRefractiveIndex = designImmersionOilRefractiveIndex
        self.designSpecimenLayerRefractiveIndex = designSpecimenLayerRefractiveIndex
        self.actualImmersionOilRefractiveIndex = actualImmersionOilRefractiveIndex
        self.actualSpecimenLayerRefractiveIndex = actualSpecimenLayerRefractiveIndex
        self.actualPointSourceDepthInSpecimenLayer = actualPointSourceDepthInSpecimenLayer
        # BUG FIX: homeDirectory was accepted but silently dropped; keep it so
        # callers can read it back like every other constructor argument.
        self.homeDirectory = homeDirectory

    def CreatePsf(self, command, psfCommandName, xSize, ySize, zSize):
        """Run psfCommandName via the given command service (blocking) and
        return the command's 'output' dataset."""
        module = command.run(psfCommandName, True,
                             "xSize", xSize,
                             "ySize", ySize,
                             "zSize", zSize,
                             "fftType", "none",
                             "scopeType", self.scopeType,
                             "psfModel", self.psfModel,
                             "xySpace", self.xySpace,
                             "zSpace", self.zSpace,
                             "emissionWavelength", self.emissionWavelength,
                             "numericalAperture", self.numericalAperture,
                             "designImmersionOilRefractiveIndex", self.designImmersionOilRefractiveIndex,
                             "designSpecimenLayerRefractiveIndex", self.designSpecimenLayerRefractiveIndex,
                             "actualImmersionOilRefractiveIndex", self.actualImmersionOilRefractiveIndex,
                             "actualSpecimenLayerRefractiveIndex", self.actualSpecimenLayerRefractiveIndex,
                             "actualPointSourceDepthInSpecimenLayer", self.actualPointSourceDepthInSpecimenLayer,
                             "centerPsf", True).get()
        return module.getOutputs().get("output")
# -*- coding:utf-8 -*-
'''Created on 2014-8-7

@author: Administrator
'''
from sys import path as sys_path

# Allow importing modules from the parent directory.
if not '..' in sys_path:sys_path.append("..")

import web

# Early approach: one file split into several modules, classes imported
# directly into this namespace.
from login.login import (index,login,loginCheck,In,reset,register,find_password)
from blog.blog import (write_blog,upload,blog_content_manage,Get,Del,blog_single_self,blog_single_other)
from admin.admin import (adminAdd,adminGet,adminDel,adminEdit)

# Later approach: mounted web.py sub-applications.
from wiki.view import wiki_app
from download.download import download_app
from meeting.meeting import meeting_app
from bbs.bbs import bbs_app

# URL routing table: (pattern, handler-class-name-or-subapp) pairs.
urls=(
    '/','index',
    '/login','login',
    '/loginCheck','loginCheck',
    '/(admin|user_blog)','In',
    '/reset/(.*)','reset',
    '/register','register',
    '/find_password','find_password',
    '/write_blog','write_blog',
    '/upload','upload',
    '/blog_content_manage','blog_content_manage',
    '/Get/classification','Get',
    '/Del/blog_content','Del',
    '/blog_single_self','blog_single_self',
    '/blog_single_other','blog_single_other',
    '/admin/add','adminAdd',
    '/admin/get','adminGet',
    '/admin/del','adminDel',
    '/admin/edit','adminEdit',
    '/wiki',wiki_app,
    '/download',download_app,
    '/meeting',meeting_app,
    '/bbs',bbs_app,
)

app = web.application(urls ,locals())

# web.py sessions are normally only usable with web.config.debug = False; the
# workaround below reuses a single session instance across reloader restarts
# so sessions also work in debug mode. Production generally sets
# web.config.debug = False.
web.config.debug = True

if web.config.get('_session') is None:
    session = web.session.Session(app,web.session.DiskStore('sessions'))
    web.config._session=session
else:
    session=web.config._session

# Share the session across modules by attaching it to web.ctx via a loadhook.
def session_hook():web.ctx.session=session
app.add_processor(web.loadhook(session_hook))

if __name__=='__main__':
    app.run()
#!/usr/bin/python import participantCollection participantCollection = participantCollection.ParticipantCollection() numberStillIn = participantCollection.sizeOfParticipantsWhoAreStillIn() initialNumber = participantCollection.size() print "There are currently **" + str(numberStillIn) + " out of " + str(initialNumber) +"** original participants. That's **" + str(int(round(100*numberStillIn/initialNumber,0))) + "%**." print "These participants have checked in at least once in the last 15 days
:" print "" for participant in participantCollection.participantsWhoAreStillInAndHaveCheckedIn(): print "/u/" + participant.name print "" print "These participants have not reported a relapse, so they are still in the running, but **if they do not check in
by the end of today, they will be removed from the list, and will not be considered victorious**:" print "" for participant in participantCollection.participantsWhoAreStillInAndHaveNotCheckedIn(): print "/u/" + participant.name + " ~" print ""
from __future__ import absolute_import, division, print_function, unicode_literals # Statsd client. Loosely based on the version by Steve Ivy <steveivy@gmail.com> import logging import random import socket import time from contextlib import contextmanager log = logging.getLogger(__name__) class StatsD(object): def __init__(self, host='localhost', port=8125, enabled=True, prefix=''): self.addr = None self.enabled = enabled if enabled: self.set_address(host, port) self.prefix = prefix self.udp_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) def set_address(self, host, port=8125): try: self.addr = (socket.gethostbyname(host), port) except socket.gaierror: self.addr = Non
e self.enabled = False @contextmanager de
f timed(self, stat, sample_rate=1): log.debug('Entering timed context for %r' % (stat,)) start = time.time() yield duration = int((time.time() - start) * 1000) log.debug('Exiting timed context for %r' % (stat,)) self.timing(stat, duration, sample_rate) def timing(self, stats, time, sample_rate=1): """ Log timing information """ unit = 'ms' log.debug('%r took %s %s' % (stats, time, unit)) self.update_stats(stats, "%s|%s" % (time, unit), sample_rate) def increment(self, stats, sample_rate=1): """ Increments one or more stats counters """ self.update_stats(stats, 1, sample_rate) def decrement(self, stats, sample_rate=1): """ Decrements one or more stats counters """ self.update_stats(stats, -1, sample_rate) def update_stats(self, stats, delta=1, sampleRate=1): """ Updates one or more stats counters by arbitrary amounts """ if not self.enabled or self.addr is None: return if type(stats) is not list: stats = [stats] data = {} for stat in stats: data["%s%s" % (self.prefix, stat)] = "%s|c" % delta self.send(data, sampleRate) def send(self, data, sample_rate): sampled_data = {} if sample_rate < 1: if random.random() <= sample_rate: for stat, value in data.items(): sampled_data[stat] = "%s|@%s" % (value, sample_rate) else: sampled_data = data try: for stat, value in sampled_data.items(): self.udp_sock.sendto("%s:%s" % (stat, value), self.addr) except Exception as e: log.exception('Failed to send data to the server: %r', e) if __name__ == '__main__': sd = StatsD() for i in range(1, 100): sd.increment('test')
# Package version, following semantic versioning (MAJOR.MINOR.PATCH).
__version__ = '0.0.1'
E|re.UNICODE), ': - ') re_mauseparador = (re.compile(ur'(?P<prevchar>[\)a-z])\:[ \.][\–\–\—\-](?P<firstword>[\w\»])', re.LOCALE|re.UNICODE), '\g<prevchar>: - \g<firstword>') re_titulo = (re.compile(ur'((O Sr[\.:])|(A Sr\.?(ª)?))(?!( Deputad))'), '') re_ministro = (re.compile(ur'^Ministr'), '') re_secestado = (re.compile(ur'^Secretári[oa] de Estado.*:'), '') re_palavra = (re.compile(ur'(concedo(-lhe)?|dou|tem|vou dar)(,?[\w ^,]+,?)? a palavra|(faça favor(?! de terminar))', re.UNICODE|re.IGNORECASE), '') re_concluir = (re.compile(ur'(tempo esgotou-se)|(esgotou-se o( seu)? tempo)|((tem (mesmo )?de|queira) (terminar|concluir))|((ultrapassou|esgotou|terminou)[\w ,]* o( seu)? tempo)|((peço|solicito)(-lhe)? que (termine|conclua))|(atenção ao tempo)|(remate o seu pensamento)|(atenção para o tempo de que dispõe)|(peço desculpa mas quero inform)|(deixem ouvir o orador)|(faça favor de prosseguir a sua)|(favor de (concluir|terminar))|(poder prosseguir a sua intervenção)|(faça( o)? favor de continuar|(queira[\w ,]* concluir))', re.UNICODE|re.IGNORECASE), '') re_president = (re.compile(ur'O Sr\.?|A Sr\.?ª? Presidente\ ?(?P<nome>\([\w ]+\))?(?P<sep>\:[ \.]?[\–\–\—\-])'), '') re_cont = (re.compile(ur'O Orador|A Oradora(?P<sep>\:[ \.]?[\–\–\—\-\-])', re.UNICODE), '') re_voto = (re.compile(ur'^Submetid[oa]s? à votação', re.UNICODE), '') re_interv = (re.compile(ur'^(?P<titulo>O Sr[\.:]?|A Sr[\.:]?(ª)?)\ (?P<nome>[\w ,’-]+)\ ?(?P<partido>\([\w -]+\))?(?P<sep>\:?[ \.]?[\–\–\—\-]? ?)', re.UNICODE), '') #re_interv_semquebra = (re.compile(ur'(?P<titulo>O Sr\.?|A Sr(\.)?(ª)?)\ (?P<nome>[\w ,’-]{1,30})\ ?(?P<partido>\([\w -]+\))?(?P<sep>\:[ \.]?[\–\–\—\-])', re.UNICODE), '') re_interv_semquebra = (re.compile(ur'(?P<titulo>O Sr\.?|A Sr(\.)?(ª)?)\ (?P<nome>[\w ,’-]{1,50})\ ?(?P<partido>\([\w -]+\))?(?P<sep>\:[ \.]?[\–\–\—\-] )', re.UNICODE), '') re_interv_simples = (re.compile(ur'^(?P<nome>[\w ,’-]+)\ ?(?P<partido>\([\w -]+\))?\ ?(?P<sep>\:?[ \.]?[\–\–\—\-]? 
)', re.UNICODE), '') def change_type(p, newtype): stype, text = p.split(']', 1) text = text.strip() return '[%s] %s' % (newtype, text) def get_type(p): stype, text = p.split(']', 1) stype = stype.strip('[] ') return stype def get_speaker(p): stype, text = p.split(']', 1) text = text.strip() try: speaker, text = re.split(re_separador[0], text, 1) except ValueError: print 'Não consegui determinar o speaker. Vai vazio.' print ' ' + p print raise return '' return speaker def get_text(p): stype, text = p.split(']', 1) text = text.strip() if ': -' in text: speaker, text = text.split(':', 1) else: pass return text def strip_type(p): stype, text = p.split(']', 1) text = text.strip() return text def check_and_split_para(p): # verificar se tem regex da intervenção # se não, return None # se sim, dividir e reagrupar pass class RaspadarTagger: def __init__(self): self.contents = [] # cache para registar cargos de governo e nomes self.gov_posts = {} def parse_txt_file(self, txtfile): buffer = open(txtfile, 'r').read() paragraphs = buffer.split('\n\n') for para in paragraphs: self.parse_paragraph(para) self.process_orphans() def parse_paragraph(self, p): p = p.decode('utf-8') p = p.strip(' \n') if not p: return # FIXME: monkeypatch aqui: É preciso rectificar os separadores. Isto devia # acontecer no html2txt, mas não tenho tempo agora para re-processar # os HTML's. Desculpem lá. if re.search(re_mauseparador[0], p): p = re.sub(re_mauseparador[0], re_mauseparador[1], p, count=1) # corresponde à regex de intervenção? 
if re.search(re_interv[0], p): # é intervenção self.parse_statement(p) elif re.search(re_cont[0], p): # é a continuação de uma intervenção ("O Orador") self.parse_statement(p, cont=True) else: # é outra coisa self.parse_other(p) def parse_statement(self, p, cont=False): if cont: p = re.sub(re_cont[0], re_cont[1], p, 1) p = re.sub(re_separador[0], '', p, 1).strip() stype = MP_CONT else: if not (re.match(re_titulo[0], p) and re.search(re_separador[0], p)): stype = ORPHAN else: speaker,
text = re.split(re_separador[0], p, 1) speaker = re.sub(re_titulo[0], re_titulo[1], speaker,
count=1).strip(u'ª \n') p = speaker + ': - ' + text.strip() if p.startswith('Presidente'): return self.parse_president(p) elif re.match(re_ministro[0], p) or re.match(re_secestado[0], p): return self.parse_government(p) elif p.startswith(u'Secretári') and not 'Estado' in re.split(re_separador[0], p)[0]: return self.parse_secretary(p) elif re.match(re_interv_simples[0], p): stype = MP_STATEMENT else: stype = STATEMENT output = '[%s] %s' % (stype, p) # encontrar intervenções onde não há quebra de linha # TODO: este check tem de ser feito no parse_paragraph if re.search(re_interv_semquebra[0], output): #print '### Encontrei uma condensada: ###' result = re.split(re_interv_semquebra[0], output) new_p = '' for part in result[1:]: if part and part != u'ª': if part.endswith(('.', u'ª')): new_p += part + ' ' else: new_p += part # arrumar a primeira parte # print 'Primeira: ' + result[0] # print 'Segunda: ' + new_p # print self.contents.append(result[0]) # processar a segunda try: self.parse_statement(new_p) except RuntimeError: # loop infinito, vamos mostrar o que se passa print 'Loop infinito ao processar uma linha com mais do que uma intervenção.' 
print u'1ª: ' + result[0] print u'2ª: ' + new_p raise return self.contents.append(output) return output def parse_president(self, p): # extrair nome do/a presidente, caso lá esteja m = re.search(re_president[0], p) if m: name = m.group('nome') # retirar todo o nome e separador p = re.sub(re_president[0], re_president[1], p, 1).strip() if u'encerrada a sessão' in p or u'encerrada a reunião' in p: stype = PRESIDENT_CLOSE elif (u'quórum' in p or 'quorum' in p) and 'aberta' in p: stype = PRESIDENT_OPEN p = p.replace('Presidente: - ', '', 1) elif re.search(re_palavra[0], p): stype = PRESIDENT_NEWSPEAKER elif re.search(re_concluir[0], p): stype = PRESIDENT_ASIDE else: stype = PRESIDENT_STATEMENT output = '[%s] %s' % (stype, p) self.contents.append(output) return output def parse_government(self, p): # A linha vem assim # Ministra da Saúde (Alice Nenhures): - Acho muito bem! # E nós queremos # Alice Nenhures (Ministra da Saúde): - Acho muito bem! # E nas partes onde só é indicado o cargo, queremos re-incluir o nome # e para isso usamos o dicionário self.gov_posts como cache result = re.split(re_separador[0], p, 1) if len(result) == 2: speaker, text = result elif len(result) == 1: if re.search(re_separador[0], result[0]): # erros de redacção ex. 'Ministro do Trabalho: Blá blá blá' speaker, text = re.split(re.separador[0], result[0], 1) else: print ' Result too short' print re
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distrib
uted with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may ob
tain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import from __future__ import print_function from .engine import RouterEngine from .address import Address __all__ = ["RouterEngine", "Address"]
return_sequences=True, go_backwards=False, name='rnn_fw') self.rnn_bw = tf.keras.layers.CuDNNLSTM(units=num_units // 2, return_sequences=True, go_backwards=False, name='rnn_bw') def forward(self, inputs, masks): def rnn_fn(x, m, rnn): x = rnn(x) # x = tf.reduce_max(x, 1) # max pooling # x = mean_pool(x, m) # mean pooling indices = tf.reduce_sum(m, 1, keepdims=True) - 1 x = tf.gather_nd(x, tf.cast(indices, tf.int32), batch_dims=1) return x lengths = tf.reduce_sum(tf.cast(masks, tf.int32), axis=1) masks = tf.cast(masks, tf.float32) inputs = tf.multiply(inputs, tf.expand_dims(masks, 2)) inputs_bw = tf.reverse_sequence(inputs, lengths, 1, 0) outputs_fw = rnn_fn(inputs, masks, self.rnn_fw) outputs_bw = rnn_fn(inputs_bw, masks, self.rnn_bw) outputs = tf.concat([outputs_fw, outputs_bw], axis=1) return outputs class Transformer(object): def __init__(self, num_units): self.hidden = tf.keras.layers.Dense(num_units) self.transformer = TransformerBlock(num_units, num_units * 4, num_layer=2) def forward(self, inputs, masks): masks = tf.cast(masks, tf.float32) inputs = tf.multiply(inputs, tf.expand_dims(masks, 2)) inputs = self.hidden(inputs) return self.transformer.forward(inputs, masks) class DAN(object): def __init__(self, num_units): self.hidden = tf.keras.layers.Dense(num_units, activation=tf.nn.relu) def forward(self, inputs, masks): masks = tf.cast(masks, tf.float32) inputs = tf.multiply(inputs, tf.expand_dims(masks, 2)) inputs = tf.reduce_sum(inputs, 1) / tf.reduce_sum(masks, 1, keepdims=True) return self.hidden(inputs) def get_text_encoder(encoder_type='rnn'): if encoder_type == 'rnn': return RNN elif encoder_type == 'trans': return Transformer elif encoder_type == 'dan': return DAN else: raise ValueError(encoder_type) class ImageTextEmbedding(object): def __init__(self, word_emb, encoder_dim, encoder_type='rnn', norm=True, drop_p=0.25, contrastive=False, margin=0.5, num_neg_sample=10, lambda1=1.0, lambda2=1.0, internal=True): self.word_emb = 
tf.Variable(tf.convert_to_tensor(word_emb), name="emb", trainable=True) self.text_encoder = get_text_encoder(encoder_type)(encoder_dim) self.text_feat_proj = tf.keras.layers.Dense(encoder_dim) self.img_feat_proj = tf.keras.layers.Dense(encoder_dim) self.dropout = tf.keras.layers.Dropout(drop_p) self.margin = margin self.num_neg_sample = num_neg_sample self.lambda1 = lambda1 self.lambda2 = lambda2 self.contrastive = contrastive self.internal = internal self.norm = norm # normalize the embedding self.text_outputs = [] def forward_img(self, img_inputs, training): x = self.img_feat_proj(img_inputs) if self.norm: x = tf.nn.l2_normalize(x, axis=-1) return self.dropout(x, training=training) def forward_text(self, text_inputs, text_masks, training): if len(text_inputs.get_shape()) == 2: x = tf.nn.embedding_lookup(self.word_emb, text_inputs) else: x = text_inputs self.text_outputs.append(mean_pool(x, text_masks)) x = self.text_encoder.forward(x, text_masks) self.text_outputs.append(x) x = self.text_feat_proj(x) if self.norm: x = tf.nn.l2_normalize(x, axis=-1) return self.dropout(x, training=training) def encode(self, img_inputs, text_inputs, text_masks, training): img_feats = self.forward_img(img_inputs, training) text_feats = self.forward_text(text_inputs, text_masks, training) return img_feats, text_feats def forward(self, img_inputs, text_inputs, text_masks, labels, training): img_feats, text_feats = self.encode(img_inputs, text_inputs, text_masks, training) if self.contrastive: loss = contrastive_loss(img_feats, text_feats, self.margin) sent_im_dist = - similarity_fn(text_feats, img_feats) elif self.internal: loss = internal_loss(img_feats, text_feats, labels)
sent_im_dist = - similarity_fn(text_feats, img_fe
ats) else: loss = embedding_loss(img_feats, text_feats, labels, self.margin, self.num_neg_sample, self.lambda1, self.lambda2) sent_im_dist = pdist(text_feats, img_feats) rec = recall_k(sent_im_dist, labels, ks=[1, 5, 10]) return loss, rec def order_sim(im, s): im = tf.expand_dims(im, 0) s = tf.expand_dims(s, 1) diff = tf.clip_by_value(s - im, 0, 1e6) dist = tf.sqrt(tf.reduce_sum(diff ** 2, 2)) scores = -tf.transpose(dist) return scores def similarity_fn(im, s, order=False): if order: return order_sim(im, s) return tf.matmul(im, s, transpose_b=True) def internal_loss(im_embeds, sent_embeds, im_labels): logits_s = tf.matmul(sent_embeds, im_embeds, transpose_b=True) cost_s = tf.nn.softmax_cross_entropy_with_logits_v2(im_labels, logits_s) logits_im = tf.matmul(im_embeds, sent_embeds, transpose_b=True) cost_im = tf.nn.softmax_cross_entropy_with_logits_v2(tf.transpose(im_labels), logits_im) return tf.reduce_mean(cost_s) + tf.reduce_mean(cost_im) def contrastive_loss(im_embeds, sent_embeds, margin, max_violation=True): """ modified https://github.com/fartashf/vsepp/blob/master/model.py#L260 """ scores = similarity_fn(im_embeds, sent_embeds) batch_size = tf.shape(im_embeds)[0] diagonal = tf.diag_part(scores) d1 = tf.reshape(diagonal, (batch_size, 1)) d2 = tf.reshape(diagonal, (1, batch_size)) cost_s = tf.clip_by_value(margin + scores - d1, 0, 1e6) cost_im = tf.clip_by_value(margin + scores - d2, 0, 1e6) zeros = tf.zeros(batch_size) cost_s = tf.matrix_set_diag(cost_s, zeros) cost_im = tf.matrix_set_diag(cost_im, zeros) if max_violation: cost_s = tf.reduce_max(cost_s, 1) cost_im = tf.reduce_max(cost_im, 0) return tf.reduce_sum(cost_s) + tf.reduce_sum(cost_im) def pdist(x1, x2): """ x1: Tensor of shape (h1, w) x2: Tensor of shape (h2, w) Return pairwise distance for each row vector in x1, x2 as a Tensor of shape (h1, h2) """ x1_square = tf.reshape(tf.reduce_sum(x1 * x1, axis=1), [-1, 1]) x2_square = tf.reshape(tf.reduce_sum(x2 * x2, axis=1), [1, -1]) return tf.sqrt(x1_square 
- 2 * tf.matmul(x1, tf.transpose(x2)) + x2_square + 1e-4) def embedding_loss(im_embeds, sent_embeds, im_labels, margin, num_neg_sample, lambda1, lambda2): """ im_embeds: (b, 512) image embedding tensors sent_embeds: (sample_size * b, 512) sentence embedding tensors where the order of sentence corresponds to the order of images and setnteces for the same image are next to each other im_labels: (sample_size * b, b) boolean tensor, where (i, j) entry is True if and only if sentence[i], image[j] is a positive pair """ im_labels = tf.cast(im_labels, tf.bool) # compute embedding loss num_img = tf.shape(im_embeds)[0] num_sent = tf.shape(sent_embeds)[0] sent_im_ratio = tf.div(num_sent, num_img) sent_im_dist = pdist(sent_embeds, im_embeds) # image loss: sentence, positive image, and negative image pos_pair_dist = tf.reshape(tf.boolean_mask(sent_im_dist, im_labels), [num_sent, 1]) neg_pair_dist = tf.reshape(tf.boolean_mask(sent_im_dist, ~im_labels), [num_sent, -1]) im_loss = tf.clip_by_value(margin + pos_pair_dist - neg_pair_dist, 0, 1e6) im_loss = tf.reduce_mean(tf.nn.top_k(im_loss, k=num_neg_sample)[0]) # sentence loss: image, positive sentence, and negative sentence neg_pair_dist = tf.reshape( tf.boolean_mask(tf.transpose(sent_im_dist), ~tf.transpose(im_labels)), [num_img, -1]) neg_pair_d
'''
Created on 11/02/2010

@author: henry@henryjenkins.name
'''


class webInterface(object):
    """Renders per-user iptables accounting data as a static HTML page."""

    # File object for the page currently being written (set by __openFile).
    writeFile = None

    def __init__(self):
        pass

    def __openFile(self, fileName):
        # Open (and truncate) the output file for writing.
        self.writeFile = open(fileName, 'w')

    def closeFile(self):
        self.writeFile.close()

    def writeHeader(self, title = 'Henry\'s iptables data accounting'):
        """Write the HTML prologue up to and including </HEAD>."""
        self.writeFile.write('<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">\n')
        self.writeFile.write('<HTML>\n')
        self.writeFile.write('<HEAD>\n')
        self.writeFile.write('<TITLE>' + title + '</TITLE>\n')
        self.writeFile.write('</HEAD>\n')

    def writeBody(self, users):
        """Write one table row per user, sorted by key.

        users -- mapping of address -> accounting object exposing
                 getUpData/getDownData/getData (project types; shape assumed
                 from the calls below — confirm against the caller).
        """
        self.writeFile.write('<BODY>\n')
        self.writeFile.write('<table border="1">')
        self.writeFile.write('<tr>')
        self.writeFile.write('<td>IP address</td>')
        self.writeFile.write('<td>On-peak Packets</td>')
        self.writeFile.write('<td>On-peak Data</td>')
        self.writeFile.write('<td>Off-peak Packets</td>')
        self.writeFile.write('<td>Off-peak Data</td>')
        self.writeFile.write('<td>Total Packets</td>')
        self.writeFile.write('<td>Total Data</td>')
        self.writeFile.write('</tr>')
        # sorted() works on both Python 2 and 3; the original
        # `users.keys(); .sort()` idiom breaks on Python 3 dict views.
        for user in sorted(users):
            self.writeFile.write('<tr>')
            self.writeFile.write('<td>' + user + '</td>')
            self.writeFile.write('<td>' + str(users[user].getUpData('pkts', date=None, peak='other')) + '</td>')
            self.writeFile.write('<td>' + self.humanizeNumber(users[user].getUpData('data', date=None, peak='other')) + '</td>')
            self.writeFile.write('<td>' + str(users[user].getDownData('pkts', date=None, peak='other')) + '</td>')
            self.writeFile.write('<td>' + self.humanizeNumber(users[user].getDownData('data', date=None, peak='other')) + '</td>')
            self.writeFile.write('<td>' + str(users[user].getData(type = 'pkts')) + '</td>')
            self.writeFile.write('<td>' + self.humanizeNumber(users[user].getData(type = 'data')) + '</td>')
            self.writeFile.write('</tr>')
        self.writeFile.write('</table>')
        self.writeFile.write('</BODY>\n')

    def writeFooter(self):
        self.writeFile.write('</HTML>\n')

    def humanizeNumber(self, number = 0):
        """Return *number* (bytes) as a human-readable string.

        Floor division (//) pins the original Python 2 integer-division
        output (e.g. 2048 -> '2 KBytes') on Python 3 as well.
        """
        if number > 1024 * 1024 * 1024:
            return str(number // (1024 * 1024 * 1024)) + ' GBytes'
        elif number > 1024 * 1024:
            return str(number // (1024 * 1024)) + ' MBytes'
        elif number > 1024:
            return str(number // 1024) + ' KBytes'
        return str(number) + ' Bytes'

    def outputIndex(self, file, users = None):
        """Write the complete index page for *users* to *file*."""
        self.__openFile(file)
        self.writeHeader()
        self.writeBody(users)
        self.writeFooter()
        self.closeFile()
import platform


# -----------------------------------------------------------------------------
# Guess platform we are running on

def current_platform():
    """Return the board name for the machine we are running on.

    Maps the machine type reported by the platform module to 'ev3' or
    'brickpi'; anything else yields 'unsupported'.
    """
    board_by_machine = {
        'armv5tejl': 'ev3',
        'armv6l': 'brickpi',
    }
    return board_by_machine.get(platform.machine(), 'unsupported')


if current_platform() == 'brickpi':
    from .brickpi import *
else:
    # Import ev3 by default, so that it is covered by documentation.
    from .ev3 import *
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import os
import requests
import sys
import tempfile
import zipfile

from . import Command


class Deploy(Command):
    """Deploy a module on an Odoo instance"""

    def __init__(self):
        super(Deploy, self).__init__()
        self.session = requests.session()

    def deploy_module(self, module_path, url, login, password, db='', force=False):
        """Authenticate against *url* and upload the module at *module_path*.

        Returns the server's textual response. The temporary zip archive is
        always removed, even when the upload fails.
        """
        url = url.rstrip('/')
        csrf_token = self.authenticate(url, login, password, db)
        module_file = self.zip_module(module_path)
        try:
            return self.upload_module(url, module_file, force=force,
                                      csrf_token=csrf_token)
        finally:
            os.remove(module_file)

    def upload_module(self, server, module_file, force=False, csrf_token=None):
        """POST the zipped module to the server's import endpoint."""
        print("Uploading module file...")
        url = server + '/base_import_module/upload'
        post_data = {'force': '1' if force else ''}
        if csrf_token:
            post_data['csrf_token'] = csrf_token
        with open(module_file, 'rb') as f:
            res = self.session.post(url, files={'mod_file': f}, data=post_data)
        res.raise_for_status()
        return res.text

    def authenticate(self, server, login, password, db=''):
        """Log in and return the CSRF token reported by the server (if any).

        Raises Exception when `base_import_module` is not installed on the
        server (404) or the login fails (any non-200 status).
        """
        print("Authenticating on server '%s' ..." % server)
        # Fixate session with a given db if any
        self.session.get(server + '/web/login', params=dict(db=db))
        args = dict(login=login, password=password, db=db)
        res = self.session.post(server + '/base_import_module/login', args)
        if res.status_code == 404:
            raise Exception("The server '%s' does not have the 'base_import_module' installed." % server)
        elif res.status_code != 200:
            raise Exception(res.text)
        return res.headers.get('x-csrf-token')

    def zip_module(self, path):
        """Zip the module directory at *path*; return the archive's path.

        The caller is responsible for deleting the returned file (see
        deploy_module). On failure the partial archive is removed here.
        """
        path = os.path.abspath(path)
        if not os.path.isdir(path):
            raise Exception("Could not find module directory '%s'" % path)
        container, module_name = os.path.split(path)
        # mkstemp instead of the race-prone mktemp: the file is created
        # atomically. Close the descriptor; ZipFile reopens it by name.
        fd, temp = tempfile.mkstemp(suffix='.zip')
        os.close(fd)
        try:
            print("Zipping module directory...")
            with zipfile.ZipFile(temp, 'w') as zfile:
                for root, dirs, files in os.walk(path):
                    for file in files:
                        file_path = os.path.join(root, file)
                        # Archive paths are made relative to the container dir.
                        zfile.write(file_path, file_path.split(container).pop())
            return temp
        except Exception:
            os.remove(temp)
            raise

    def run(self, cmdargs):
        """Entry point: parse *cmdargs* and perform the deployment."""
        parser = argparse.ArgumentParser(
            prog="%s deploy" % sys.argv[0].split(os.path.sep)[-1],
            description=self.__doc__
        )
        parser.add_argument('path', help="Path of the module to deploy")
        parser.add_argument('url', nargs='?',
                            help='Url of the server (default=http://localhost:8069)',
                            default="http://localhost:8069")
        parser.add_argument('--db', dest='db',
                            help='Database to use if server does not use db-filter.')
        parser.add_argument('--login', dest='login', default="admin",
                            help='Login (default=admin)')
        parser.add_argument('--password', dest='password', default="admin",
                            help='Password (default=admin)')
        parser.add_argument('--verify-ssl', action='store_true',
                            help='Verify SSL certificate')
        parser.add_argument('--force', action='store_true',
                            help='Force init even if module is already installed. (will update `noupdate="1"` records)')
        if not cmdargs:
            sys.exit(parser.print_help())
        args = parser.parse_args(args=cmdargs)

        if not args.verify_ssl:
            self.session.verify = False

        try:
            if not args.url.startswith(('http://', 'https://')):
                args.url = 'https://%s' % args.url
            result = self.deploy_module(args.path, args.url, args.login,
                                        args.password, args.db, force=args.force)
            print(result)
        except Exception as e:
            # `except Exception, e` (Python 2-only syntax) replaced with the
            # form that parses on Python 2.6+ and 3.
            sys.exit("ERROR: %s" % e)
#!/usr/bin/python
#
# Urwid html fragment output wrapper for "screen shots"
# Copyright (C) 2004-2007 Ian Ward
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
#
# Urwid web site: http://excess.org/urwid/

"""
HTML PRE-based UI implementation

NOTE(review): this module uses Python-2-only syntax (tuple parameter in
draw_screen, string-form raise statements) and will not parse on Python 3.
"""

from urwid import util
from urwid.main_loop import ExitMainLoop
from urwid.display_common import AttrSpec, BaseScreen

# replace control characters with ?'s
_trans_table = "?" * 32 + "".join([chr(x) for x in range(32, 256)])

# Fallback colours used when an AttrSpec leaves fg/bg unset.
_default_foreground = 'black'
_default_background = 'light gray'


class HtmlGeneratorSimulationError(Exception):
    pass


class HtmlGenerator(BaseScreen):
    """Fake screen that records rendered output as HTML <pre> fragments.

    Screen sizes and keypresses are replayed from the class-level lists
    below (populated by screenshot_init); rendered frames accumulate in
    `fragments` (collected by screenshot_collect).
    """

    # class variables -- deliberately shared across instances so that
    # screenshot_init/screenshot_collect can interact with whichever
    # instance the application created.
    fragments = []
    sizes = []
    keys = []
    started = True

    def __init__(self):
        super(HtmlGenerator, self).__init__()
        self.colors = 16
        self.bright_is_bold = False # ignored
        self.has_underline = True # ignored
        self.register_palette_entry(None,
            _default_foreground, _default_background)

    def set_terminal_properties(self, colors=None, bright_is_bold=None,
        has_underline=None):
        # Fill unspecified properties from the current values.
        if colors is None:
            colors = self.colors
        if bright_is_bold is None:
            bright_is_bold = self.bright_is_bold
        if has_underline is None:
            has_underline = self.has_underline

        self.colors = colors
        self.bright_is_bold = bright_is_bold
        self.has_underline = has_underline

    def set_mouse_tracking(self, enable=True):
        """Not yet implemented"""
        pass

    def start(self):
        pass

    def stop(self):
        pass

    def set_input_timeouts(self, *args):
        pass

    def reset_default_terminal_palette(self, *args):
        pass

    def run_wrapper(self,fn):
        """Call fn."""
        return fn()

    def draw_screen(self, (cols, rows), r ):
        """Create an html fragment from the render object.
        Append it to HtmlGenerator.fragments list.
        """
        # collect output in l
        l = []

        assert r.rows() == rows

        if r.cursor is not None:
            cx, cy = r.cursor
        else:
            cx = cy = None

        y = -1
        for row in r.content():
            y += 1
            col = 0

            for a, cs, run in row:
                # replace control characters so they render visibly
                run = run.translate(_trans_table)
                if isinstance(a, AttrSpec):
                    aspec = a
                else:
                    # resolve palette entry for the current colour depth
                    aspec = self._palette[a][
                        {1: 1, 16: 0, 88:2, 256:3}[self.colors]]

                if y == cy and col <= cx:
                    # cursor is on this row at or past this run: split the
                    # run so the cursor cell can be rendered inverted
                    run_width = util.calc_width(run, 0, len(run))
                    if col+run_width > cx:
                        l.append(html_span(run, aspec, cx-col))
                    else:
                        l.append(html_span(run, aspec))
                    col += run_width
                else:
                    l.append(html_span(run, aspec))

            l.append("\n")

        # add the fragment to the list
        self.fragments.append( "<pre>%s</pre>" % "".join(l) )

    def clear(self):
        """
        Force the screen to be completely repainted on the next
        call to draw_screen().

        (does nothing for html_fragment)
        """
        pass

    def get_cols_rows(self):
        """Return the next screen size in HtmlGenerator.sizes."""
        if not self.sizes:
            raise HtmlGeneratorSimulationError, "Ran out of screen sizes to return!"
        return self.sizes.pop(0)

    def get_input(self, raw_keys=False):
        """Return the next list of keypresses in HtmlGenerator.keys."""
        if not self.keys:
            raise ExitMainLoop()
        if raw_keys:
            return (self.keys.pop(0), [])
        return self.keys.pop(0)


# Pre-computed RGB values for the default attribute, used by html_span
# when an AttrSpec has no explicit foreground/background.
_default_aspec = AttrSpec(_default_foreground, _default_background)
(_d_fg_r, _d_fg_g, _d_fg_b, _d_bg_r, _d_bg_g, _d_bg_b) = (
    _default_aspec.get_rgb_values())


def html_span(s, aspec, cursor = -1):
    """Render text run *s* with attribute *aspec* as HTML <span> markup.

    cursor -- character offset of the cursor within *s*, or -1 for none;
    the cursor cell is rendered with fg/bg swapped.
    """
    fg_r, fg_g, fg_b, bg_r, bg_g, bg_b = aspec.get_rgb_values()
    # use real colours instead of default fg/bg
    if fg_r is None:
        fg_r, fg_g, fg_b = _d_fg_r, _d_fg_g, _d_fg_b
    if bg_r is None:
        bg_r, bg_g, bg_b = _d_bg_r, _d_bg_g, _d_bg_b
    html_fg = "#%02x%02x%02x" % (fg_r, fg_g, fg_b)
    html_bg = "#%02x%02x%02x" % (bg_r, bg_g, bg_b)
    if aspec.standout:
        html_fg, html_bg = html_bg, html_fg
    extra = (";text-decoration:underline" * aspec.underline +
        ";font-weight:bold" * aspec.bold)

    # NOTE: this nested helper intentionally shadows the outer html_span
    # name within this function's scope.
    def html_span(fg, bg, s):
        if not s:
            return ""
        return ('<span style="color:%s;'
            'background:%s%s">%s</span>' % (fg, bg, extra, html_escape(s)))

    if cursor >= 0:
        # split the run into before-cursor / cursor cell / after-cursor,
        # rendering the cursor cell with inverted colours
        c_off, _ign = util.calc_text_pos(s, 0, len(s), cursor)
        c2_off = util.move_next_char(s, c_off, len(s))
        return (html_span(html_fg, html_bg, s[:c_off]) +
            html_span(html_bg, html_fg, s[c_off:c2_off]) +
            html_span(html_fg, html_bg, s[c2_off:]))
    else:
        return html_span(html_fg, html_bg, s)


def html_escape(text):
    """Escape text so that it will be displayed safely within HTML"""
    text = text.replace('&','&amp;')
    text = text.replace('<','&lt;')
    text = text.replace('>','&gt;')
    return text


def screenshot_init( sizes, keys ):
    """
    Replace curses_display.Screen and raw_display.Screen class with
    HtmlGenerator.

    Call this function before executing an application that uses
    curses_display.Screen to have that code use HtmlGenerator instead.

    sizes -- list of ( columns, rows ) tuples to be returned by each call
             to HtmlGenerator.get_cols_rows()
    keys -- list of lists of keys to be returned by each call to
            HtmlGenerator.get_input()

    Lists of keys may include "window resize" to force the application to
    call get_cols_rows and read a new screen size.

    For example, the following call will prepare an application to:
     1. start in 80x25 with its first call to get_cols_rows()
     2. take a screenshot when it calls draw_screen(..)
     3. simulate 5 "down" keys from get_input()
     4. take a screenshot when it calls draw_screen(..)
     5. simulate keys "a", "b", "c" and a "window resize"
     6. resize to 20x10 on its second call to get_cols_rows()
     7. take a screenshot when it calls draw_screen(..)
     8. simulate a "Q" keypress to quit the application

    screenshot_init( [ (80,25), (20,10) ],
        [ ["down"]*5, ["a","b","c","window resize"], ["Q"] ] )
    """
    # validate the arguments up front so misuse fails with a clear message
    try:
        for (row,col) in sizes:
            assert type(row) == int
            assert row>0 and col>0
    except (AssertionError, ValueError):
        raise Exception, "sizes must be in the form [ (col1,row1), (col2,row2), ...]"

    try:
        for l in keys:
            assert type(l) == list
            for k in l:
                assert type(k) == str
    except (AssertionError, ValueError):
        raise Exception, "keys must be in the form [ [keyA1, keyA2, ..], [keyB1, ..], ...]"

    # monkey-patch both display backends to use the HTML generator
    import curses_display
    curses_display.Screen = HtmlGenerator
    import raw_display
    raw_display.Screen = HtmlGenerator

    HtmlGenerator.sizes = sizes
    HtmlGenerator.keys = keys


def screenshot_collect():
    """Return screenshots as a list of HTML fragments."""
    # hand back the accumulated fragments and reset the collector
    l = HtmlGenerator.fragments
    HtmlGenerator.fragments = []
    return l
from django.contrib import admin # from models import Agent, ReCa, Accom
odation, Beach, Activity, Contact # # @admin.register(ReCa, Activity) # class VenueAdmin(admin.ModelAdmin): # list_display = ('name', 'internal_rating', 'ready', 'description',) # list_filter = ('ready', 'internal_rating',) # search_fields = ['name', 'description', 'address'] # ordering = ['id'] # s
ave_on_top = True # # # @admin.register(Accomodation) # class AccomodAdmin(VenueAdmin): # list_display = ('name', 'stars', 'ready', 'description',) # list_filter = ('ready', 'stars',) # # # @admin.register(Beach) # class BeachAdmin(admin.ModelAdmin): # list_display = ('name', 'type', 'description',) # list_filter = ('name',) # # # admin.site.register(Agent) # admin.site.register(Contact) # #
029]') # This is required because u() will mangle the string and ur'' isn't valid # python3 syntax ESCAPE = re.compile(u'[\\x00-\\x1f\\\\"\\b\\f\\n\\r\\t\u2028\u2029]') ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])') HAS_UTF8 = re.compile(r'[\x80-\xff]') ESCAPE_DCT = { '\\': '\\\\', '"': '\\"', '\b': '\\b', '\f': '\\f', '\n': '\\n', '\r': '\\r', '\t': '\\t', } for i in range(0x20): #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i)) ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,)) for i in [0x2028, 0x2029]: ESCAPE_DCT.setdefault(unichr(i), '\\u%04x' % (i,)) FLOAT_REPR = repr def encode_basestring(s, _PY3=PY3, _q=u('"')): """Return a JSON representation of a Python string """ if _PY3: if isinstance(s, binary_type): s = s.decode('utf-8') else: if isinstance(s, str) and HAS_UTF8.search(s) is not None: s = s.decode('utf-8') def replace(match): return ESCAPE_DCT[match.group(0)] return _q + ESCAPE.sub(replace, s) + _q def py_encode_basestring_ascii(s, _PY3=PY3): """Return an ASCII-only JSON representation of a Python string """ if _PY3: if isinstance(s, binary_type): s = s.decode('utf-8') else: if isinstance(s, str) and HAS_UTF8.search(s) is not None: s = s.decode('utf-8') def replace(match): s = match.group(0) try: return ESCAPE_DCT[s] except KeyError: n = ord(s) if n < 0x10000: #return '\\u{0:04x}'.format(n) return '\\u%04x' % (n,) else: # surrogate pair n -= 0x10000 s1 = 0xd800 | ((n >> 10) & 0x3ff) s2 = 0xdc00 | (n & 0x3ff) #return '\\u{0:04x}\\u{1:04x}'.format(s1, s2) return '\\u%04x\\u%04x' % (s1, s2) return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"' encode_basestring_ascii = ( c_encode_basestring_ascii or py_encode_basestring_ascii) class JSONEncoder(object): """Extensible JSON <http://json.org> encoder for Python data structures. 
Supports the following objects and types by default: +-------------------+---------------+ | Python | JSON | +===================+===============+ | dict, namedtuple | object | +-------------------+---------------+ | list, tuple | array | +-------------------+---------------+ | str, unicode | string | +-------------------+---------------+ | int, long, float | number | +-------------------+---------------+ | True | true | +-------------------+---------------+ | False | false | +-------------------+---------------+ | None | null | +-------------------+---------------+ To extend this to recognize other objects, subclass and implement a ``.default()`` method with another method that returns a serializable object for ``o`` if possible, otherwise it should call the superclass implementation (to raise ``TypeError``). """ item_separator = ', ' key_separator = ': ' def __init__(self, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, sort_keys=False, indent=None, separators=None, encoding='utf-8', default=None, use_decimal=True, namedtuple_as_object=True, tuple_as_array=True, bigint_as_string=False, item_sort_key=None, for_json=False, ignore_nan=False, int_as_string_bitcount=None): """Constructor for JSONEncoder, with sensible defaults. If skipkeys is false, then it is a TypeError to attempt encoding of keys that are not str, int, long, float or None. If skipkeys is True, such items are simply skipped. If ensure_ascii is true, the output is guaranteed to be str objects with all incoming unicode characters escaped. If ensure_ascii is false, the output will be unicode object. If check_circular is true, then lists, dicts, and custom encoded objects will be checked for circular references during encoding to prevent an infinite recursion (which would cause an OverflowError). Otherwise, no such check takes place. If allow_nan is true, then NaN, Infinity, and -Infinity will be encoded as such. 
This behavior is not JSON specification compliant, but is consistent with most JavaScript based encoders and decoders. Otherwise, it will be a ValueError to encode such floats. If sort_keys is true, then the output of dictionar
ies will be sorted by key; this is useful for regression tests to ensure that JSON serializations can be compared on a day-to-day basis. If indent is a string, then JSON array elements and object members will be pretty-printed with a newline followed by that string repeated for each level of nesting. ``None`` (the default) selects the most compact repre
sentation without any newlines. For backwards compatibility with versions of simplejson earlier than 2.1.0, an integer is also accepted and is converted to a string with that many spaces. If specified, separators should be an (item_separator, key_separator) tuple. The default is (', ', ': ') if *indent* is ``None`` and (',', ': ') otherwise. To get the most compact JSON representation, you should specify (',', ':') to eliminate whitespace. If specified, default is a function that gets called for objects that can't otherwise be serialized. It should return a JSON encodable version of the object or raise a ``TypeError``. If encoding is not None, then all input strings will be transformed into unicode using that encoding prior to JSON-encoding. The default is UTF-8. If use_decimal is true (not the default), ``decimal.Decimal`` will be supported directly by the encoder. For the inverse, decode JSON with ``parse_float=decimal.Decimal``. If namedtuple_as_object is true (the default), objects with ``_asdict()`` methods will be encoded as JSON objects. If tuple_as_array is true (the default), tuple (and subclasses) will be encoded as JSON arrays. If bigint_as_string is true (not the default), ints 2**53 and higher or lower than -2**53 will be encoded as strings. This is to avoid the rounding that happens in Javascript otherwise. If int_as_string_bitcount is a positive number (n), then int of size greater than or equal to 2**n or lower than or equal to -2**n will be encoded as strings. If specified, item_sort_key is a callable used to sort the items in each dictionary. This is useful if you want to sort items other than in alphabetical order by key. If for_json is true (not the default), objects with a ``for_json()`` method will use the return value of that method for encoding as JSON instead of the object. 
If *ignore_nan* is true (default: ``False``), then out of range :class:`float` values (``nan``, ``inf``, ``-inf``) will be serialized as ``null`` in compliance with the ECMA-262 specification. If true, this will override *allow_nan*. """ self.skipkeys = skipkeys self.ensure_ascii = ensure_ascii self.check_circular = check_circular self.allow_nan = allow_nan self.sort_keys = sort_keys self.use_decimal = use_decimal self.namedtuple_as_object = namedtuple_as_object self.tuple_as_array = tuple_as_array self.bigint_as_string = bigint_as_string self.item_sort_key = item_sort_key self.for_json = for_json self.ignore_nan = ignore_nan self.int_as_string_bitcount = int_as_string_bitcount if indent is not None and not isinstance(indent, stri
The proportion of the mask which is ``True`` :type: double """ return (self.n_true * 1.0) / self.n_pixels @property def proportion_false(self): r""" The proportion of the mask which is ``False`` :type: double """ return (self.n_false * 1.0) / self.n_pixels @property def true_indices(self): r""" The indices of pixels that are true. :type: (``n_dims``, ``n_true``) ndarray
""" # Ignore the channel axis return np.vstack(np.nonzero(self.pixels[..., 0])).T @property def false_indices(self): r""" The indices of pixels that are false. :type: (``n_dims``, ``n_false``) ndarray
""" # Ignore the channel axis return np.vstack(np.nonzero(~self.pixels[..., 0])).T @property def all_indices(self): r""" Indices into all pixels of the mask, as consistent with true_indices and false_indices :type: (``n_dims``, ``n_pixels``) ndarray """ return np.indices(self.shape).reshape([self.n_dims, -1]).T def __str__(self): return ('{} {}D mask, {:.1%} ' 'of which is True '.format(self._str_shape, self.n_dims, self.proportion_true)) def from_vector(self, flattened): r""" Takes a flattened vector and returns a new :class:`BooleanImage` formed by reshaping the vector to the correct dimensions. Note that this is rebuilding a boolean image **itself** from boolean values. The mask is in no way interpreted in performing the operation, in contrast to MaskedImage, where only the masked region is used in from_vector() and as_vector(). Any image landmarks are transferred in the process. Parameters ---------- flattened : (``n_pixels``,) np.bool ndarray A flattened vector of all the pixels of a BooleanImage. Returns ------- image : :class:`BooleanImage` New BooleanImage of same shape as this image """ mask = BooleanImage(flattened.reshape(self.shape)) mask.landmarks = self.landmarks return mask def invert(self): r""" Inverts the current mask in place, setting all True values to False, and all False values to True. """ self.pixels = ~self.pixels def inverted_copy(self): r""" Returns a copy of this Boolean image, which is inverted. Returns ------- inverted_image: :class:`BooleanNSImage` An inverted copy of this boolean image. """ inverse = deepcopy(self) inverse.invert() return inverse def bounds_true(self, boundary=0, constrain_to_bounds=True): r""" Returns the minimum to maximum indices along all dimensions that the mask includes which fully surround the True mask values. In the case of a 2D Image for instance, the min and max define two corners of a rectangle bounding the True pixel values. 
Parameters ---------- boundary : int, optional A number of pixels that should be added to the extent. A negative value can be used to shrink the bounds in. Default: 0 constrain_to_bounds: bool, optional If True, the bounding extent is snapped to not go beyond the edge of the image. If False, the bounds are left unchanged. Default: True Returns -------- min_b : (D,) ndarray The minimum extent of the True mask region with the boundary along each dimension. If constrain_to_bounds was True, is clipped to legal image bounds. max_b : (D,) ndarray The maximum extent of the True mask region with the boundary along each dimension. If constrain_to_bounds was True, is clipped to legal image bounds. """ mpi = self.true_indices maxes = np.max(mpi, axis=0) + boundary mins = np.min(mpi, axis=0) - boundary if constrain_to_bounds: maxes = self.constrain_points_to_bounds(maxes) mins = self.constrain_points_to_bounds(mins) return mins, maxes def bounds_false(self, boundary=0, constrain_to_bounds=True): r""" Returns the minimum to maximum indices along all dimensions that the mask includes which fully surround the False mask values. In the case of a 2D Image for instance, the min and max define two corners of a rectangle bounding the False pixel values. Parameters ---------- boundary : int >= 0, optional A number of pixels that should be added to the extent. A negative value can be used to shrink the bounds in. Default: 0 constrain_to_bounds: bool, optional If True, the bounding extent is snapped to not go beyond the edge of the image. If False, the bounds are left unchanged. Default: True Returns -------- min_b : (D,) ndarray The minimum extent of the False mask region with the boundary along each dimension. If constrain_to_bounds was True, is clipped to legal image bounds. max_b : (D,) ndarray The maximum extent of the False mask region with the boundary along each dimension. If constrain_to_bounds was True, is clipped to legal image bounds. 
""" return self.inverted_copy().bounds_true( boundary=boundary, constrain_to_bounds=constrain_to_bounds) def warp_to(self, template_mask, transform, warp_landmarks=False, interpolator='scipy', **kwargs): r""" Warps this BooleanImage into a different reference space. Parameters ---------- template_mask : :class:`menpo.image.boolean.BooleanImage` Defines the shape of the result, and what pixels should be sampled. transform : :class:`menpo.transform.base.Transform` Transform **from the template space back to this image**. Defines, for each True pixel location on the template, which pixel location should be sampled from on this image. warp_landmarks : bool, optional If ``True``, warped_image will have the same landmark dictionary as self, but with each landmark updated to the warped position. Default: ``False`` interpolator : 'scipy' or 'c', optional The interpolator that should be used to perform the warp. Default: 'scipy' kwargs : dict Passed through to the interpolator. See `menpo.interpolation` for details. Returns ------- warped_image : type(self) A copy of this image, warped. """ # enforce the order as 0, for this boolean data, then call super manually_set_order = kwargs.get('order', 0) if manually_set_order != 0: raise ValueError( "The order of the interpolation on a boolean image has to be " "0 (attempted to set {})".format(manually_set_order)) kwargs['order'] = 0 return Image.warp_to(self, template_mask, transform, warp_landmarks=warp_landmarks, interpolator=interpolator, **kwargs) def _build_warped_image(self, template_mask, sampled_pixel_values, **kwargs): r""" Builds the warped image from the template mask and sampled pixel values. Overridden for BooleanImage as we can't use the usual from_vector_inplace method. """ warped_image = BooleanImage.blank(template_mask.shape) # As we are a mask image, we have to implement the update a little # more manually than other image classes. warped_image.pixels[warped_im
"""Buildbot step that runs Twisted trial with --jelly and parses the
machine-readable (jellied/Banana-encoded) result stream into TestResults.

NOTE(review): legacy Python 2 / old-Twisted code (print statements,
``has_key``, ``StringIO``, ``twisted.pb``); kept byte-identical, comments
only added.
"""

from buildbot.status import tests
from buildbot.process.step import SUCCESS, FAILURE, BuildStep
from buildbot.process.step_twisted import RunUnitTests
from zope.interface import implements
from twisted.python import log, failure
from twisted.spread import jelly
from twisted.pb.tokens import BananaError
from twisted.web.html import PRE
from twisted.web.error import NoResource


class Null:
    # Bare attribute holder; result-type constants are attached to the
    # ResultTypes instance below via setattr.
    pass

ResultTypes = Null()
ResultTypeNames = ["SKIP", "EXPECTED_FAILURE", "FAILURE", "ERROR",
                   "UNEXPECTED_SUCCESS", "SUCCESS"]

try:
    from twisted.trial import reporter  # introduced in Twisted-1.0.5
    # extract the individual result types
    for name in ResultTypeNames:
        setattr(ResultTypes, name, getattr(reporter, name))
except ImportError:
    from twisted.trial import unittest  # Twisted-1.0.4 has them here
    for name in ResultTypeNames:
        setattr(ResultTypes, name, getattr(unittest, name))

# Keep twisted.python.log from accumulating errors raised during parsing.
log._keepErrors = 0

from twisted.trial import remote  # for trial/jelly parsing
import StringIO


class OneJellyTest(tests.OneTest):
    def html(self, request):
        """Render one test's result as an HTML page.

        The display mode is taken from the first remaining URL path
        segment ('short' or 'long'); returns a NoResource for an unknown
        mode when the result is a Failure.
        """
        tpl = "<HTML><BODY>\n\n%s\n\n</body></html>\n"
        pptpl = "<HTML><BODY>\n\n<pre>%s</pre>\n\n</body></html>\n"
        t = request.postpath[0]  # one of 'short', 'long' #, or 'html'
        if isinstance(self.results, failure.Failure):
            # it would be nice to remove unittest functions from the
            # traceback like unittest.format_exception() does.
            if t == 'short':
                s = StringIO.StringIO()
                self.results.printTraceback(s)
                return pptpl % PRE(s.getvalue())
            elif t == 'long':
                s = StringIO.StringIO()
                self.results.printDetailedTraceback(s)
                return pptpl % PRE(s.getvalue())
            #elif t == 'html':
            #    return tpl % formatFailure(self.results)
            # ACK! source lines aren't stored in the Failure, rather,
            # formatFailure pulls them (by filename) from the local
            # disk. Feh. Even printTraceback() won't work. Double feh.
            return NoResource("No such mode '%s'" % t)
        if self.results == None:
            return tpl % "No results to show: test probably passed."
        # maybe results are plain text?
        return pptpl % PRE(self.results)


class TwistedJellyTestResults(tests.TestResults):
    # Each collected test is displayed through OneJellyTest above.
    oneTestClass = OneJellyTest

    def describeOneTest(self, testname):
        # self.tests[testname][0] holds the mapped result type.
        return "%s: %s\n" % (testname, self.tests[testname][0])


class RunUnitTestsJelly(RunUnitTests):
    """I run the unit tests with the --jelly option, which generates
    machine-parseable results as the tests are run.
    """
    trialMode = "--jelly"
    implements(remote.IRemoteReporter)

    # Map trial's result-type constants onto buildbot's.
    ourtypes = {
        ResultTypes.SKIP: tests.SKIP,
        ResultTypes.EXPECTED_FAILURE: tests.EXPECTED_FAILURE,
        ResultTypes.FAILURE: tests.FAILURE,
        ResultTypes.ERROR: tests.ERROR,
        ResultTypes.UNEXPECTED_SUCCESS: tests.UNEXPECTED_SUCCESS,
        ResultTypes.SUCCESS: tests.SUCCESS,
        }

    def __getstate__(self):
        # Drop the live Banana decoder before pickling; it is rebuilt in
        # start() on the next run.
        #d = RunUnitTests.__getstate__(self)
        d = self.__dict__.copy()
        # Banana subclasses are Ephemeral
        if d.has_key("decoder"):
            del d['decoder']
        return d

    def start(self):
        """Set up the jelly decoder (with a restrictive taster) and a fresh
        results collector, then start the underlying trial step."""
        self.decoder = remote.DecodeReport(self)
        # don't accept anything unpleasant from the (untrusted) build slave
        # The jellied stream may have Failures, but everything inside should
        # be a string
        security = jelly.SecurityOptions()
        security.allowBasicTypes()
        security.allowInstancesOf(failure.Failure)
        self.decoder.taster = security
        self.results = TwistedJellyTestResults()
        RunUnitTests.start(self)

    def logProgress(self, progress):
        # XXX: track number of tests
        BuildStep.logProgress(self, progress)

    def addStdout(self, data):
        """Feed trial's stdout into the Banana decoder; on a decoding error
        disable further parsing (best-effort, errors are logged)."""
        if not self.decoder:
            return
        try:
            self.decoder.dataReceived(data)
        except BananaError:
            self.decoder = None
            log.msg("trial --jelly output unparseable, traceback follows")
            log.deferr()

    # --- remote.IRemoteReporter callbacks invoked by the decoder ---

    def remote_start(self, expectedTests, times=None):
        print "remote_start", expectedTests

    def remote_reportImportError(self, name, aFailure, times=None):
        pass

    def remote_reportStart(self, testClass, method, times=None):
        print "reportStart", testClass, method

    def remote_reportResults(self, testClass, method, resultType, results,
                             times=None):
        """Record one finished test in self.results, translating the trial
        result type; unknown types map to tests.UNKNOWN."""
        print "reportResults", testClass, method, resultType
        which = testClass + "." + method
        self.results.addTest(which,
                             self.ourtypes.get(resultType, tests.UNKNOWN),
                             results)

    def finished(self, rc):
        """Hand the collected results to the Build and complete the step;
        the step fails when any test failed or no total could be counted."""
        # give self.results to our Build object
        self.build.testsFinished(self.results)
        total = self.results.countTests()
        count = self.results.countFailures()
        result = SUCCESS
        if total == None:
            result = (FAILURE, ['tests%s' % self.rtext(' (%s)')])
        if count:
            result = (FAILURE, ["%d tes%s%s" % (count,
                                                (count == 1 and 't' or 'ts'),
                                                self.rtext(' (%s)'))])
        return self.stepComplete(result)

    def finishStatus(self, result):
        """Update the status display: green '<N> tests passed' or red
        '<N> failures', and attach the results object."""
        total = self.results.countTests()
        count = self.results.countFailures()
        color = "green"
        text = []
        if count == 0:
            text.extend(["%d %s" % \
                         (total,
                          total == 1 and "test" or "tests"),
                         "passed"])
        else:
            text.append("tests")
            text.append("%d %s" % \
                        (count,
                         count == 1 and "failure" or "failures"))
            color = "red"
        self.updateCurrentActivity(color=color, text=text)
        self.addFileToCurrentActivity("tests", self.results)
        #self.finishStatusSummary()
        self.finishCurrentActivity()
#!/usr/bin/env python3
"""
Calculate minor reads coverage.

    Minor-read ratio (MRR), which was defined as the ratio of reads for
    the less covered allele (reference or variant allele) over the total
    number of reads covering the position at which the variant was
    called. (Only applied to hetero sites.)

    @Author: wavefancy@gmail.com

    Usage:
        MinorReadsCoverage.py (-o| -f cutoff)
        MinorReadsCoverage.py -h | --help | -v | --version

    Notes:
        1. Read vcf file from stdin.
        2. MinorReadsCoverage only calculated from hetero sites.
        3. Output results to stdout.

    Options:
        -o            Output MinorReadsCoverage statistics.
        -f cutoff     Filter out sites if MRC < cutoff.
        -t tags       Comma separated tag list.
        -h --help     Show this screen.
        -v --version  Show version.
"""
import sys
from signal import signal, SIGPIPE, SIG_DFL

# Exit silently on a broken pipe (e.g. piping into `head`).
signal(SIGPIPE, SIG_DFL)


def ShowFormat():
    '''Input File format example:'''
    print('''
    ''')


def minor_read_ratio(ref, alt):
    """Return the minor-read ratio min(ref, alt) / (ref + alt).

    Returns 1.0 when there are no reads at all (ref == alt == 0), matching
    the original behavior of treating coverage-free sites as unfiltered.
    """
    total = ref + alt
    if total == 0:
        return 1.0
    return min(ref, alt) / total


def is_het(gt):
    """Return True if a VCF GT string (e.g. '0/1', '1|0') is a called,
    heterozygous diploid genotype.

    The length guard fixes an IndexError the original code raised on
    haploid calls such as '0' or '.', which have no character at index 2.
    """
    return len(gt) >= 3 and gt[0] != '.' and gt[0] != gt[2]


def genotype_indices(format_field, tags):
    """Map each tag in `tags` to its position in the VCF FORMAT field
    (e.g. 'GT:AD:DP'); exit with an error message if a tag is missing."""
    ss = format_field.upper().split(':')
    idx = []
    for x in tags:
        try:
            idx.append(ss.index(x))
        except ValueError:
            sys.stderr.write('ERROR: can not find tag: "%s", from input vcf FORMAT field.\n' % (x))
            sys.exit(-1)
    return idx


if __name__ == '__main__':
    # docopt/pysam are imported here so the pure helpers above stay
    # importable without the third-party dependencies.
    from docopt import docopt
    args = docopt(__doc__, version='1.0')

    from pysam import VariantFile

    vcfMetaCols = 9          # number of columns for vcf meta information.
    tags = ['GT', 'AD']      # GATK, AD: reads depth for ref and alt allele.
    cutoff = float(args['-f']) if args['-f'] else 1

    infile = VariantFile('-', 'r')
    if args['-f']:
        sys.stdout.write(str(infile.header))
    if args['-o']:
        sys.stdout.write('#CHROM\tPOS\tREF\tALT\tMRR\n')

    for line in infile:
        ss = str(line).strip().split()
        gidx = genotype_indices(ss[8], tags)  # check FORMAT line by line.
        ref = 0
        alt = 0
        for x in ss[vcfMetaCols:]:
            temp = x.split(':')
            if is_het(temp[gidx[0]]):
                ad = [int(y) for y in temp[gidx[1]].split(',')]
                ref += ad[0]
                alt += sum(ad[1:])

        mrc = minor_read_ratio(ref, alt)
        if args['-o']:
            out = ss[:2] + ss[3:5] + ['%.4f' % (mrc)]
            sys.stdout.write('%s\n' % ('\t'.join(out)))
        if args['-f']:
            if mrc >= cutoff:
                sys.stdout.write('%s' % (str(line)))

    infile.close()
    sys.stdout.flush()
    sys.stdout.close()
    sys.stderr.flush()
    sys.stderr.close()
# -*- coding: utf-8 -*-
# pylint: disable=no-init
"""
Django settings for home_web project.

Generated by 'django-admin startproject' using Django 1.10.

For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/

For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""

import os
import re

from celery.schedules import crontab
from configurations import Configuration, values


class CeleryBrokerURLValue(values.Value):
    """
    Value subclass that converts 'unix://' scheme to 'redis+socket://'.
    """

    def to_python(self, value):
        # Celery expects 'redis+socket://' for unix-socket Redis brokers,
        # while the environment variable may carry a plain 'unix://' URL.
        return re.sub(
            r'^unix://', 'redis+socket://', super().to_python(value)
        )


class Common(Configuration):
    # Base configuration shared by the Dev/Test/Prod classes below
    # (django-configurations selects one of them at startup).

    # Build paths inside the project like this: os.path.join(BASE_DIR, ...)
    BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

    # Quick-start development settings - unsuitable for production
    # See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/

    # SECURITY WARNING: keep the secret key used in production secret!
    # (Prod overrides this with a SecretValue read from the environment.)
    SECRET_KEY = '5w$77!lmo&g)e5j6uhl4i2=nffnnj0y1y07(9@-f)@b7*g%+sd'

    # SECURITY WARNING: don't run with debug turned on in production!
    DEBUG = True

    ALLOWED_HOSTS = []

    INTERNAL_IPS = [
        '127.0.0.1',
    ]

    # Application definition

    INSTALLED_APPS = [
        'core.apps.CoreConfig',
        'heating.apps.HeatingConfig',
        'django.contrib.admin',
        'django.contrib.auth',
        'django.contrib.contenttypes',
        'django.contrib.sessions',
        'django.contrib.messages',
        'django.contrib.staticfiles',
        'django_filters',
        'rest_framework',
    ]

    MIDDLEWARE = [
        'django.middleware.security.SecurityMiddleware',
        'django.contrib.sessions.middleware.SessionMiddleware',
        'django.middleware.common.CommonMiddleware',
        'django.middleware.csrf.CsrfViewMiddleware',
        'django.contrib.auth.middleware.AuthenticationMiddleware',
        'django.contrib.messages.middleware.MessageMiddleware',
        'django.middleware.clickjacking.XFrameOptionsMiddleware',
    ]

    ROOT_URLCONF = 'home_web.urls'

    TEMPLATES = [
        {
            'BACKEND': 'django.template.backends.django.DjangoTemplates',
            'DIRS': [],
            'APP_DIRS': True,
            'OPTIONS': {
                'context_processors': [
                    'django.template.context_processors.debug',
                    'django.template.context_processors.request',
                    'django.contrib.auth.context_processors.auth',
                    'django.contrib.messages.context_processors.messages',
                ],
            },
        },
    ]

    WSGI_APPLICATION = 'home_web.wsgi.application'

    # Database
    # https://docs.djangoproject.com/en/1.10/ref/settings/#databases
    # Defaults to an on-disk SQLite file; Prod overrides with an
    # environment-provided database URL.
    DATABASES = values.DatabaseURLValue(
        'sqlite:///{}'.format(os.path.join(BASE_DIR, 'db.sqlite3'))
    )

    # Password validation
    # https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators

    AUTH_PASSWORD_VALIDATORS = [
        {
            'NAME': 'django.contrib.auth.password_validation.'
                    'UserAttributeSimilarityValidator',
        },
        {
            'NAME': 'django.contrib.auth.password_validation.'
                    'MinimumLengthValidator',
        },
        {
            'NAME': 'django.contrib.auth.password_validation.'
                    'CommonPasswordValidator',
        },
        {
            'NAME': 'django.contrib.auth.password_validation.'
                    'NumericPasswordValidator',
        },
    ]

    # Internationalization
    # https://docs.djangoproject.com/en/1.10/topics/i18n/

    LANGUAGE_CODE = 'fr-FR'

    TIME_ZONE = 'Europe/Paris'

    USE_I18N = True

    USE_L10N = True

    USE_TZ = True

    # Static files (CSS, JavaScript, Images)
    # https://docs.djangoproject.com/en/1.10/howto/static-files/

    STATIC_URL = '/static/'

    # Celery: broker URL comes from the REDIS_URL environment variable,
    # rewritten by CeleryBrokerURLValue when it is a unix socket.
    REDIS_URL = values.Value()
    CELERY_BROKER_URL = CeleryBrokerURLValue(environ_name='REDIS_URL')
    CELERY_TASK_ROUTES = {
        'heating.tasks.*': {'queue': 'celery', 'delivery_mode': 'transient'},
    }
    CELERY_BEAT_SCHEDULE = {
        'update-pilotwire-status': {
            'task': 'heating.pilotwire.update_status',
            'schedule': 60,
        },
        'set-pilotwire-modes': {
            'task': 'heating.pilotwire.set_modes',
            'schedule': crontab(minute='*/15'),
        },
        'weekly-clear-old-derogations': {
            'task': 'heating.tasks.clearoldderogations',
            'schedule': crontab(minute=0, hour=0, day_of_week='mon'),
            'args': (7,),
        },
    }
    CELERY_TIME_ZONE = TIME_ZONE

    # Pilotwire controller endpoint, provided via environment variables.
    PILOTWIRE_IP = values.IPValue()
    PILOTWIRE_PORT = values.IntegerValue()


class Dev(Common):
    """
    The in-development settings and the default configuration
    """

    INSTALLED_APPS = Common.INSTALLED_APPS + [
        'debug_toolbar',
    ]

    # Debug toolbar middleware must come as early as possible.
    MIDDLEWARE = [
        'debug_toolbar.middleware.DebugToolbarMiddleware',
    ] + Common.MIDDLEWARE


class Test(Common):
    """
    The testing settings
    """

    LOGGING = {
        'version': 1,
        'disable_existing_loggers': False,
        'handlers': {
            'pilotwire_testing_handler': {
                'level': 'INFO',
                'class': 'heating.log.PilotwireHandler',
                'logLength': 5,
            },
        },
        'loggers': {
            'pilotwire_testing_logger': {
                'handlers': ['pilotwire_testing_handler'],
                'level': 'INFO',
            },
        },
    }

    EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'

    ADMINS = [('Test', 'test@example.com')]


class Prod(Common):
    """
    The in-production settings
    """

    DEBUG = False

    # All secrets and deployment specifics come from the environment.
    SECRET_KEY = values.SecretValue()

    ADMINS = values.SingleNestedTupleValue()

    ALLOWED_HOSTS = values.ListValue()

    DATABASES = values.DatabaseURLValue()

    EMAIL = values.EmailURLValue()

    REST_FRAMEWORK = {
        'DEFAULT_RENDERER_CLASSES': (
            'rest_framework.renderers.JSONRenderer',
        ),
        'DEFAULT_PARSER_CLASSES': (
            'rest_framework.parsers.JSONParser',
        ),
    }

    STATIC_ROOT = values.PathValue()

    LOGGING = {
        'version': 1,
        'disable_existing_loggers': False,
        'handlers': {
            'pilotwire_handler': {
                'level': 'INFO',
                'class': 'heating.log.PilotwireHandler',
                'logLength': 500,
            },
        },
        'loggers': {
            'heating.pilotwire': {
                'handlers': ['pilotwire_handler'],
                'level': 'INFO',
            },
        },
    }

    # Authentication

    AUTHENTICATION_BACKENDS = [
        'core.auth.backends.SettingsBackend',
    ] + Common.AUTHENTICATION_BACKENDS  # pylint: disable=no-member

    ADMIN_LOGIN = values.Value()
    ADMIN_PASSWORD = values.SecretValue()
#!/usr/bin/env python
"""Generate the mysqltest 'change char' test for TokuDB.

Writes to stdout a test that creates CHAR(n) tables and attempts to ALTER
the column to CHAR(i) for every i in 2..255, expecting an error whenever
the column would shrink (i < n).

Fixes over the original:
  * print statements converted to single-argument print() calls, which
    produce identical output on both Python 2 and Python 3;
  * module-level sys.exit(main()) wrapped in a __main__ guard so importing
    the module no longer runs (and exits) the generator.
"""

import sys


def gen_test(n):
    """Emit the test statements for a table created as CHAR(n)."""
    print("CREATE TABLE t (a CHAR(%d));" % n)
    for v in ['hi', 'there', 'people']:
        print("INSERT INTO t VALUES ('%s');" % v)
    for i in range(2, 256):
        if i < n:
            # Shrinking the column must fail: mask the server name in the
            # error message and expect ER_UNSUPPORTED_EXTENSION.
            print("--replace_regex /MariaDB/XYZ/ /MySQL/XYZ/")
            print("--error ER_UNSUPPORTED_EXTENSION")
        else:
            # Growing is supported: build a MyISAM copy to diff against.
            print("CREATE TABLE ti LIKE t;")
            print("ALTER TABLE ti ENGINE=myisam;")
            print("INSERT INTO ti SELECT * FROM t;")
            print("ALTER TABLE ti CHANGE COLUMN a a CHAR(%d);" % i)
        print("ALTER TABLE t CHANGE COLUMN a a CHAR(%d);" % i)
        if i >= n:
            print("let $diff_tables=test.t, test.ti;")
            print("source include/diff_tables.inc;")
            print("DROP TABLE ti;")
    print("DROP TABLE t;")


def main():
    """Emit the test preamble, then run gen_test for a subset of sizes.

    Returns 0 so the exit status reflects success.
    """
    print("source include/have_tokudb.inc;")
    print("# this test is generated by change_char.py")
    print("# test char expansion")
    print("--disable_warnings")
    print("DROP TABLE IF EXISTS t,ti;")
    print("--enable_warnings")
    print("SET SESSION DEFAULT_STORAGE_ENGINE=\"TokuDB\";")
    print("SET SESSION TOKUDB_DISABLE_SLOW_ALTER=1;")
    # all n takes too long to run, so here is a subset of tests
    for n in [1, 2, 3, 4, 5, 6, 7, 8, 16, 31, 32, 63, 64, 127, 128, 254, 255]:
        gen_test(n)
    return 0


if __name__ == '__main__':
    sys.exit(main())
from django.contrib import admin

from devilry.devilry_dbcache.models import AssignmentGroupCachedData


@admin.register(AssignmentGroupCachedData)
class AssignmentGroupCachedDataAdmin(admin.ModelAdmin):
    """Admin listing for the denormalized per-group cache rows.

    Fix over the original: the search field
    'group__candidates__relatedstudent__candidate_id' was listed twice;
    the duplicate is removed.
    """

    list_display = [
        'id',
        'group',
        'first_feedbackset',
        'last_feedbackset',
        'last_published_feedbackset',
        'new_attempt_count',
        'public_total_comment_count',
        'public_student_comment_count',
        'public_examiner_comment_count',
        'public_admin_comment_count',
        'public_student_file_upload_count',
        'examiner_count',
        'candidate_count'
    ]

    # Searchable across the group, its assignment/period/subject hierarchy
    # (successive __parentnode hops), and related candidate/examiner users.
    search_fields = [
        'id',
        'group__id',
        'group__parentnode__id',
        'group__parentnode__short_name',
        'group__parentnode__long_name',
        'group__parentnode__parentnode__id',
        'group__parentnode__parentnode__short_name',
        'group__parentnode__parentnode__long_name',
        'group__parentnode__parentnode__parentnode__id',
        'group__parentnode__parentnode__parentnode__short_name',
        'group__parentnode__parentnode__parentnode__long_name',
        'group__candidates__relatedstudent__candidate_id',
        'group__candidates__relatedstudent__user__shortname',
        'group__candidates__relatedstudent__user__fullname',
        'group__examiners__relatedexaminer__user__shortname',
        'group__examiners__relatedexaminer__user__fullname',
    ]
# -*- coding: utf-8 -*-

###############################################################################
#
# GetTimestamp
# Returns the current date and time, expressed as seconds or milliseconds since January 1, 1970 (epoch time).
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################

# NOTE(review): auto-generated Temboo SDK binding; method and parameter
# names are part of the generated public interface and must not change.

from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution

import json


class GetTimestamp(Choreography):

    def __init__(self, temboo_session):
        """
        Create a new instance of the GetTimestamp Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(GetTimestamp, self).__init__(temboo_session, '/Library/Utilities/Dates/GetTimestamp')

    def new_input_set(self):
        return GetTimestampInputSet()

    def _make_result_set(self, result, path):
        return GetTimestampResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        return GetTimestampChoreographyExecution(session, exec_id, path)


class GetTimestampInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the GetTimestamp
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
    """

    def set_AddDays(self, value):
        """
        Set the value of the AddDays input for this Choreo. ((optional, integer) Adds the specified number of days to the specified date serial number. A negative number will subtract.)
        """
        super(GetTimestampInputSet, self)._set_input('AddDays', value)

    def set_AddHours(self, value):
        """
        Set the value of the AddHours input for this Choreo. ((optional, integer) Adds the specified number of hours to the specified date serial number. A negative number will subtract.)
        """
        super(GetTimestampInputSet, self)._set_input('AddHours', value)

    def set_AddMinutes(self, value):
        """
        Set the value of the AddMinutes input for this Choreo. ((optional, integer) Adds the specified number of minutes to the specified date serial number. A negative number will subtract.)
        """
        super(GetTimestampInputSet, self)._set_input('AddMinutes', value)

    def set_AddMonths(self, value):
        """
        Set the value of the AddMonths input for this Choreo. ((optional, integer) Adds the specified number of months to the specified date serial number. A negative number will subtract.)
        """
        super(GetTimestampInputSet, self)._set_input('AddMonths', value)

    def set_AddSeconds(self, value):
        """
        Set the value of the AddSeconds input for this Choreo. ((optional, integer) Adds the specified number of seconds to the specified date serial number. A negative number will subtract.)
        """
        super(GetTimestampInputSet, self)._set_input('AddSeconds', value)

    def set_AddYears(self, value):
        """
        Set the value of the AddYears input for this Choreo. ((optional, integer) Adds the specified number of years to the specified date serial number. A negative number will subtract.)
        """
        super(GetTimestampInputSet, self)._set_input('AddYears', value)

    def set_Granularity(self, value):
        """
        Set the value of the Granularity input for this Choreo. ((optional, string) Set to "seconds" to return the number of seconds since the epoch. Defaults to "milliseconds".)
        """
        super(GetTimestampInputSet, self)._set_input('Granularity', value)

    def set_SetDay(self, value):
        """
        Set the value of the SetDay input for this Choreo. ((optional, integer) Sets the day of month (1–31) of the specified date serial number.)
        """
        super(GetTimestampInputSet, self)._set_input('SetDay', value)

    def set_SetHour(self, value):
        """
        Set the value of the SetHour input for this Choreo. ((optional, integer) Sets the hours (0–23) of the specified date serial number.)
        """
        super(GetTimestampInputSet, self)._set_input('SetHour', value)

    def set_SetMinute(self, value):
        """
        Set the value of the SetMinute input for this Choreo. ((optional, integer) Sets the minutes (0–59) of the specified date serial number.)
        """
        super(GetTimestampInputSet, self)._set_input('SetMinute', value)

    def set_SetMonth(self, value):
        """
        Set the value of the SetMonth input for this Choreo. ((optional, integer) Sets the month (1–12) of the specified date serial number.)
        """
        super(GetTimestampInputSet, self)._set_input('SetMonth', value)

    def set_SetSecond(self, value):
        """
        Set the value of the SetSecond input for this Choreo. ((optional, integer) Sets the seconds (0–59) of the specified date serial number.)
        """
        super(GetTimestampInputSet, self)._set_input('SetSecond', value)

    def set_SetYear(self, value):
        """
        Set the value of the SetYear input for this Choreo. ((optional, integer) Sets the year (such as 1989) of the specified date serial number.)
        """
        super(GetTimestampInputSet, self)._set_input('SetYear', value)


class GetTimestampResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the GetTimestamp Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """

    def getJSONFromString(self, str):
        # NOTE(review): the parameter name shadows the builtin 'str'; kept
        # as-is because the generated signature is public interface.
        return json.loads(str)

    def get_Timestamp(self):
        """
        Retrieve the value for the "Timestamp" output from this Choreo execution. ((date) The current timestamp, expressed as the number of seconds or milliseconds since January 1, 1970 (epoch time). The Granularity input is used to indicate seconds or milliseconds.)
        """
        return self._output.get('Timestamp', None)


class GetTimestampChoreographyExecution(ChoreographyExecution):

    def _make_result_set(self, response, path):
        return GetTimestampResultSet(response, path)
import sys, os


def stop(argv):
    """Stub: stop running processes tracked by pid files.

    Not yet implemented.  The intended behaviour, per the original sketch,
    is:
      * folders = [argv] when an argument is given, otherwise every folder
        under the current working directory,
      * for each folder, check <cwd>/<folder>/temp/pids for existing pid
        files,
      * send SIGTERM (kill -15) to each recorded pid and remove the files.

    :param argv: optional folder name restricting which pid folders to
        process.  (Renamed from the original `arv`, a typo — the comments
        below always referred to `argv`.)
    """
    pwd = os.getcwd()  # base directory the per-folder pid dirs live under
    # if argv given, folders = [argv]
    # else, folders = pwd
    # for each folder in folders:
    #     check pwd/folder/temp/pids for existing pid files
    #     kill -15 & rm files


def main():
    # This module is meant to be imported and driven by a parent script,
    # not executed directly.
    # print(...) with a single argument behaves identically under
    # Python 2 and 3, unlike the former `print "..."` statement.
    print("Please don't try to run this script separately.")


if __name__ == '__main__':
    main()
unicode_literals, division, absolute_import from builtins import * # pylint: disable=unused-import, redefined-builtin class TestListInterface(object): config = """ templates: global: disable: [seen] tasks: list_get: entry_list: test_list list_1_get: entry_list: list 1 list_2_get: entry_list: list 2 test_list_add: mock: - {title: 'title 1', url: "http://mock.url/file1.torrent"} - {title: 'title 2', url: "http://mock.url/file2.torrent"} accept_all: yes list_add: - entry_list: test_list list_1_add: mock: - {title: 'title 1', url: "http://mock.url/file1.torrent"} - {title: 'title 2', url: "http://mock.url/file2.torrent"} accept_all: yes list_add: - entry_list: list 1 list_2_add: mock: - {title: 'title 3', url: "http://mock.url/file3.torrent"} accept_all: yes list_add: - entry_list: list 2 test_multiple_list_add: mock: - {title: 'title 1', url: "http://mock.url/file1.torrent"} - {title: 'title 2', url: "http://mock.url/file2.torrent"} accept_all: yes list_add: - entry_list: list 1 - entry_list: list 2 test_list_accept_with_remove: mock: - {title: 'title 1', url: "http://mock.url/file1.torrent"} - {title: 'title 2', url: "http://mock.url/file2.torrent"} - {title: 'title 3', url: "http://mock.url/file3.torrent"} list_match: from: - entry_list: test_list test_list_accept_without_remove: mock: - {title: 'title 1', url: "http://mock.url/file1.torrent"} - {title: 'title 2', url: "http://mock.url/file2.torrent"} - {title: 'title 3', url: "http://mock.url/file3.torrent"} list_match: from: - entry_list: test_list remove_on_match: no test_multiple_list_accept_with_remove: mock: - {title: 'title 1', url: "http://mock.url/file1.torrent"} - {title: 'title 2', url: "http://mock.url/file2.torrent"} - {title: 'title 3', url: "http://mock.url/file3.torrent"} list_match: from: - entry_list: list 1 - entry_list: list 2 test_multiple_list_accept_without_remove: mock: - {title: 'title 1', url: "http://mock.url/file1.torrent"} - {title: 'title 2', url: "http://mock.url/file2.torrent"} - 
{title: 'title 3', url: "http://mock.url/file3.torrent"} list_match: from: - entry_list: list 1 - entry_list: list 2 remove_on_match: no test_list_remove: mock: - {title: 'title 1', url: "http://mock.url/file1.torrent"} accept_all: yes list_remove: - entry_list: test_list test_list_reject: mock: - {title: 'title 1', url: "http://mock.url/file1.torrent"} - {title: 'title 3', url: "http://mock.url/file3.torrent"} list_match: from: - entry_list: test_list action: reject add_for_list_queue: mock: - {title: 'The 5th Wave', url: "", imdb_id: "tt2304933"} - {title: 'Drumline', url: "", imdb_id: "tt0303933"} accept_all: yes list_add: - movie_list: test_list_queue test_list_queue: mock: - {title: 'Drumline 2002 1080p BluRay DTS-HD MA 5 1 x264-FuzerHD', url: "http://mock.url/Drumline 2002 1080p BluRay DTS-HD MA 5 1 x264-FuzerHD.torrent", imdb_id: "tt0303933"} - {title: 'Drumline 2002 720p BluRay DTS-HD MA 5 1 x264-FuzerHD', url: "http://mock.url/Drumline 2002 720p Bl
uRay DTS-HD MA 5 1 x264-FuzerHD.torrent", imdb_id: "tt0303933"} - {title: 'Drumline 2002 DVDRip x264-FuzerHD', url: "http://mock.url/Drumline 2002 DVDRip x264-FuzerHD.torrent", imdb_id: "tt0303933"} list_match: from: - movie_list: test_list_queue single_match: yes get_for_list_queue: movie_list: test_list_queue test_list_clear_start:
entry_list: test_list list_clear: what: - entry_list: test_list test_list_clear_exit: entry_list: test_list list_clear: what: - entry_list: test_list phase: exit test_list_clear_input: entry_list: test_list list_clear: what: - entry_list: test_list phase: input """ def test_list_add(self, execute_task): task = execute_task('test_list_add') assert len(task.entries) == 2 task = execute_task('list_get') assert len(task.entries) == 2 def test_multiple_list_add(self, execute_task): task = execute_task('test_multiple_list_add') assert len(task.entries) == 2 task = execute_task('list_1_get') assert len(task.entries) == 2 task = execute_task('list_2_get') assert len(task.entries) == 2 def test_list_accept_with_remove(self, execute_task): task = execute_task('test_list_add') assert len(task.entries) == 2 task = execute_task('list_get') assert len(task.entries) == 2 task = execute_task('test_list_accept_with_remove') assert len(task.all_entries) == 3 assert len(task.accepted) == 2 task = execute_task('list_get') assert len(task.entries) == 0 def test_list_accept_without_remove(self, execute_task): task = execute_task('test_list_add') assert len(task.entries) == 2 task = execute_task('list_get') assert len(task.entries) == 2 task = execute_task('test_list_accept_without_remove') assert len(task.all_entries) == 3 assert len(task.accepted) == 2 task = execute_task('list_get') assert len(task.entries) == 2 def test_multiple_list_accept_with_remove(self, execute_task): task = execute_task('list_1_add') assert len(task.entries) == 2 task = execute_task('list_2_add') assert len(task.entries) == 1 task = execute_task('list_1_get') assert len(task.entries) == 2 task = execute_task('list_2_get') assert len(task.entries) == 1 task = execute_task('test_multiple_list_accept_with_remove') assert len(task.accepted) == 3 task = execute_task('list_1_get') assert len(task.entries) == 0 task = execute_task('list_2_get') assert len(task.entries) == 0 def 
test_multiple_list_accept_without_remove(self, execute_task): task = execute_task('list_1_add') assert len(task.entries) == 2 task = execute_task('list_2_add') assert len(task.entries) == 1 task = execute_task('list_1_get') assert len(task.entries) == 2 task = execute_task('list_2_get') assert len(task.entries) == 1 task = execute_task('test_multiple_list_accept_without_remove') assert len(task.accepted) == 3 task = execute_task('list_1_get') assert len(task.entries) == 2 task = execute_task('list_2_get') assert len(task.entries) == 1 def test_list_remove(self, execute_task): task = execute_task('test_list_add') assert len(task.entries) == 2 task = execute_task('list_get') assert len(task.entries) == 2 task = execute_task('test_list_remove') assert len(tas
Tetherless World Constellation at Rensselaer Polytechnic Institute # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the "Software"), # to deal in the Software without restriction, including without limitation # the rights to use, copy, modify, merge, publish, distribute, sublicense, # and/or sell copies of the Software, and to permit persons to whom the # Software is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. 
############################################################################### import os import sys from zipfile import ZipFile, ZIP_DEFLATED from contextlib import closing import paramiko import qrcode from IPython.core.display import HTML, Image from IPython.display import display, Javascript import envoy from datetime import datetime class shareUtil(): def zipdir(self, basedir, archivename, rm='no'): """ utility function to zip a single file or a directory usage : zipdir(input, output) @param basedir: input file or directory @param archivename: output file.zip @param rm: [yes, no], remove source file (optional, default=no) """ assert os.path.isdir(basedir) with closing(ZipFile(archivename, "w", ZIP_DEFLATED)) as z: for root, dirs, files in os.walk(basedir): #NOTE: ignore empty directories for fn in files: #print fn absfn = os.path.join(root, fn) zfn = absfn[len(basedir) + len(os.sep):] #XXX: relative path z.write(absfn, zfn) if rm != 'no': instruction = 'rm -rf %s' % basedir os.system(instruction) def uploadfile(self, username='epi', password='epi', hostname='localhost', port=22, inputfile=None, outputfile=None, link=False, apacheroot='/var/www/', zip=False, qr=False): ''' utility to upload file on remote server using sftp protocol usage : uploadfile(inputfile, outputfile) @rtype : str @param username: str - username on remote server @param password: str - password to access remote server @param ho
stname: str - hostname of remote server (default: localhost) @param port: port number on remote server (default: 22) @param inputfile: str - local path to the file to uploaded @param outputfile: remot
e path to the file to upload @param link: bolean [True, False] default False, print a link to download the file (remote path needs to be in a web available directory) @param apacheroot: path to apache root default to '/var/www/' required if link == True @param zip: bolean deafault False, zip the output @param qr: bolean deafault False, return qrcode as image @return: link to uploaded file if link=True or qr image if qr=True & link=True, none if link is set to false ''' if zip: #print 'add zipfile' zipfile = str(inputfile + '.zip') self.zipdir(inputfile, zipfile) inputfile = zipfile #paramiko.util.log_to_file('/var/www/esr/paramiko.log') client = paramiko.SSHClient() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) client.connect(hostname, username=username, password=password) transport = paramiko.Transport((hostname, port)) transport.connect(username=username, password=password) sftp = paramiko.SFTPClient.from_transport(transport) parts = outputfile.split('/') for n in range(2, len(parts)): path = '/'.join(parts[:n]) #print 'Path:', path, sys.stdout.flush() try: s = sftp.stat(path) #print 'mode =', oct(s.st_mode) except IOError as e: #print e #print 'adding dir: ', path sftp.mkdir(path) try: sftp.put(remotepath=outputfile, localpath=inputfile) sftp.close() transport.close() print 'file uploaded' if qr: if link: pass if not link: print 'WORNING: qrcode not generated, set the option link to True' if link: filelink = outputfile.replace(apacheroot, '') link = 'http://' + os.path.normpath(hostname + '/' + filelink) raw_html = '<a href="%s" target="_blank">ESR results</a>' % link print 'results are now available for download at : ', link image = None if qr: imagefile = parts[-1].split('.')[0] + '.jpeg' qr = qrcode.QRCode(version=1, error_correction=qrcode.constants.ERROR_CORRECT_L, box_size=10, border=4) qr.add_data(link) qr.make(fit=True) img = qr.make_image() img.save(imagefile, "JPEG") print 'alive' image = Image(imagefile) return image if not qr: return 
HTML(raw_html) except IOError: print "Error: can\'t find file or read data check if input file exist and or remote location is writable" def gistit(self, filename, jist='/usr/local/bin/jist', type='notebook'): ''' use the jist utility to paste a txt file on github as gist and return a link to it usage : gistit(notebookfile) @param filename: str - path to the a text file or notebook file (.json) @param jist: str - path to the executable jist (default=/usr/local/bin/jist) @param type: str - notebook, text @return: return a link to gist if type=text, link to nbviewer if type=notebook ''' try: with open(filename): link = None jist = self.which(jist) if jist: try: r = envoy.run('%s -p %s' % (jist, filename)) if type == 'notebook': link = r.std_out.replace('\n', '').replace('https://gist.github.com', 'http://nbviewer.ipython.org') if type == 'text': link = r.std_out.replace('\n', '') return link except: print "can't generate gist, check if jist works bycommand line with: jist -p filename" if not jist: print 'cannot find jist utility, check if it is in your path' except IOError: print 'input file %s not found' % filename def get_id(self, suffix, makedir=True): ''' generate a directory based on the suffix and a time stamp output looks like : suffix_Thursday_26_September_2013_06_28_49_PM usage: getID(suffix) @param suffix: str - suffix for the directory to be generated, @return: str - directory name ''' ID = suffix + '_' + str(datetime.now().utcnow().strftime("%A_%d_%B_%Y_%I_%M_%S_%p")) if makedir: self.ensure_dir(ID) print 'session data directory : ID', ID return ID def ensure_dir(self, dir): ''' make a directory on the file sys
ord(cells, direction=DOWN): """ Under the hood: given two cells and a favoured direction, get the position of the cell with the column of one and the row of the other: A---->+ | ^ | | | | v | *<----B Both + and * are candidates for the junction of A and B - we take the one furthest down by default (specified by direction) >>> cells_dr = (_XYCell(0,1,2,None), _XYCell(0,3,4,None)) >>> junction_coord(cells_dr, DOWN) (1, 4) >>> junction_coord(cells_dr, UP) (3, 2) >>> junction_coord(cells_dr, LEFT) (1, 4) >>> junction_coord(cells_dr, RIGHT) (3, 2) >>> cells_tr = (_XYCell(0,1,4,None), _XYCell(0,3,2,None)) >>> junction_coord(cells_tr, DOWN) (3, 4) >>> junction_coord(cells_tr, UP) (1, 2) >>> junction_coord(cells_tr, LEFT) (1, 2) >>> junction_coord(cells_tr, RIGHT) (3, 4) """ new_cells = ( (cells[0].x, cells[1].y), (cells[1].x, cells[0].y) ) for index, value in enumerate(direction): if value == 0: continue if cmp(new_cells[0][index], new_cells[1][index]) == value: return new_cells[0] else: return new_cells[1] (x, y) = junction_coord((self, other), direction) if paranoid and (x, y) == (self.x, self.y) or \ (x, y) == (other.x, other.y): raise JunctionError( "_XYCell.junction(_XYCell) resulted in a cell which is equal" " to one of the input cells.\n" " self: {}\n other: {}\n x: {}\n y: {}".format( self, other, x, y)) junction_bag = self.table.get_at(x, y) if len(junction_bag) == 0: return self_bag = Bag(self.table) self_bag.add(self) other_bag = Bag(self.table) other_bag.add(other) yield (self_bag, other_bag, junction_bag) def shift(self, x=0, y=0): """Get the cell which is offset from this cell by x columns, y rows""" if not isinstance(x, int): assert y == 0, \ "_XYCell.shift: x=%r not integer and y=%r specified" % (x, y) return self.shift(x[0], x[1]) return self.table.get_at(self.x + x, self.y + y)._cell class CoreBag(object): """Has a collection of _XYCells""" def pprint(self, *args, **kwargs): return contrib_excel.pprint(self, *args, **kwargs) def as_list(self, *args, 
**kwargs): return contrib_excel.as_list(self, *args, **kwargs) def filter_one(self, filter_by): return contrib_excel.filter_one(self, filter_by) def excel_locations(self, *args, **kwargs): return contrib_excel.excel_locations(self, *args, **kwargs) def __init__(self, table): self.__store = set() self.table = table def add(self, cell): """Add a cell to this bag""" if not isinstance(cell, _XYCell): raise TypeError("Can only add _XYCell types to Bags: {}".format( cell.__class__)) self.__store.add(cell) def __eq__(self, other): """Compare two bags: they are equal if: * their table are the same table (object) * they contain the same set of cells""" if not isinstance(other, CoreBag): return False return (self.table is other.table and self.__store == other.__store) def __len__(self): return len(self.__store) def __repr__(self): return repr(self.__store) @classmethod def singleton(cls, cell, table): """ Construct a bag with one cell in it """ bag = cls(table=table) bag.add(cell) return bag @property def unordered(self): """ Obtain an unordered iterator over this bag. iter(bag) is sorted on demand, and therefore inefficient if being done repeatedly where order does not matter. """ return (Bag.singleton(c, table=self.table) for c in self.__store) @property def unordered_cells(self): """ Analogous to the `unordered` property, except that it returns _XYCells instead of Bags. """ return iter(self.__store) def __iter__(self): """ Return a view of the cells in this back in left-right, top-bottom order Note: this is expensive for large bags (when done repeatedly). If you don't care about order, use `bag.unordered`, which gives an unordered iterator. """ def yx(cell): return cell.y, cell.x for cell in sorted(self.__store, key=yx): yield Bag.singleton(cell, table=self.table) def __sub__(self, rhs): """Bags quack like sets. 
Implements - operator.""" return self.difference(rhs) def difference(self, rhs): """Bags quack like sets.""" assert self.table is rhs.table,\ "Can't difference bags from separate tables" new = copy(self) new.__store = self.__store.difference(rhs.__store) return new def __or__(self, rhs): """Bags quack like sets. Implements | operator. For mathematical purity, + (__add__) isn't appropriate""" return self.union(rhs) def union(self, rhs): """Bags quack like sets.""" assert self.table is rhs.table, "Can't union bags from separate tables" new = copy(self) new.__store = self.__store.union(rhs.__store) return new def __and__(self, rhs): return self.intersection(rhs) def intersection(self, rhs): assert self.table is rhs.table, \ "Can't take intersection of bags from separate tables" new = copy(self) new.__store = self.__store.intersection(rhs.__store) return new def select(self, function): """Select cells from this bag's table based on the cells in this bag. e.g. bag.select(lambda bag_cell, table_cell: bag_cell.y == table_cell.y and bag_cell.value == table_cell.value) would give cells in the table with the same name on the same row as a cell in the bag"""
return self.table.select_other(function, self) def select_other(self, function, other): """A
more general version of select, where another bag to select from is explicitly specified rather than using the original bag's table""" """note: self.select(f) = self.table.select_other(f, self)""" newbag = Bag(table=self.table) for bag_cell in self.__store: for other_cell in other.__store: if function(bag_cell, other_cell): newbag.add(bag_cell) break return newbag def filter(self, filter_by): """ Returns a new bag containing only cells which match the filter_by predicate. filter_by can be: a) a callable, which takes a cell as a parameter and returns True if the cell should be returned, such as `lambda cell: cell value == 'dog' b) a string, to match exactly: `u'dog'` c) a hamcrest match rule: `hamcrest.equal_to("dog") (requires hamcrest to be available) d) a compiled regex: `re.compile("dog") """ if callable(filter_by): return self._filter_internal(filter_by) elif isinstance(filter_by, six.string_types): return self._filter_internal(lambda cell: six.text_type(cell.value).strip() == filter_by) elif have_ham and isinstance(filter_by, hamcrest.matcher.Matcher): return self._filter_internal(lambda cell: filter_by.matches(cell
from django.db import models
from stdimage import StdImageField
from django.core.validators import RegexValidator
import datetime

# Role-year choices: every year from 1980 through the current year,
# inclusive.
# NOTE(review): evaluated once at import time, so a long-running process
# started before New Year will not offer the new year — confirm this is
# acceptable.
YEAR_CHOICES = []
for r in range(1980, (datetime.datetime.now().year+1)):
    YEAR_CHOICES.append((r, r))

# Year-of-study choices for hostel body members.
S_CHOICE = [('1stYear', '1stYear'), ('2ndYear', '2ndYear'), ('3rdYear', '3rdYear'), ('4thYear', '4thYear')]


# Create your models here.
class Hostel(models.Model):
    """A hostel: identity, capacity, contact details and a banner image."""
    HostelName = models.CharField(max_length=100, primary_key=True)
    HostelType = models.CharField(max_length=10)
    HostelSeat = models.IntegerField()
    HostelImage = StdImageField(upload_to='Hostels/logo/', variations={'large': (675, 300, True)})
    HostelAddress = models.CharField(max_length=200)
    HostelDescription = models.TextField()
    HostelEmail = models.EmailField()
    phone_regex = RegexValidator(regex=r'^\+?1?\d{10,13}$', message="Phone number must be entered in the format: '+999999999'. Up to 13 digits allowed.")
    HostelPhoneNo = models.CharField(max_length=13, validators=[phone_regex], blank=True)

    def __str__(self):
        return self.HostelName


class HostelEvents(models.Model):
    """An event organised by a hostel."""
    # on_delete=models.CASCADE matches the pre-Django-2.0 implicit default;
    # making it explicit is required from Django 2.0 onwards.
    HostelName = models.ForeignKey(Hostel, on_delete=models.CASCADE)
    HostelEventsName = models.CharField(max_length=100)
    HostelEventDescription = models.TextField()

    def __str__(self):
        return self.HostelEventsName


class HostelPictureGalary(models.Model):
    """A picture in a hostel's gallery.  (Class name spelling kept as-is
    to avoid breaking existing migrations and references.)"""
    HostelName = models.ForeignKey(Hostel, on_delete=models.CASCADE)
    PictureName = models.CharField(max_length=100)
    PictureLocation = StdImageField(upload_to='Hostels/galary/', variations={'large': (675, 300, True)})

    def __str__(self):
        return self.PictureName


class HostelBody(models.Model):
    """A member of a hostel's governing body for a given year."""
    HostelName = models.ForeignKey(Hostel, on_delete=models.CASCADE)
    HostelbodyRole = models.CharField(max_length=100)
    HostelbodyRoleYear = models.IntegerField(choices=YEAR_CHOICES, default=datetime.datetime.now().year)
    # NOTE(review): max_length=10 is tight for a full name — confirm.
    PersonName = models.CharField(max_length=10)
    # NOTE(review): default 'NA' is not among S_CHOICE — forms will reject
    # the default value; confirm intended.
    PersonYear = models.CharField(max_length=7, choices=S_CHOICE, default='NA')
    PersonImage = StdImageField(upload_to='Hostels/gb/', variations={'thumbnail': (300, 200, True)})

    def __str__(self):
        return self.HostelbodyRole
#!/usr/bin/python2 # Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. # # Use of this source code is governed by a BSD-style license # that can be found in the LICENSE file in the root of the source # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. # To run this script please copy "out/<build_name>//pyproto/webrtc/modules/ # audio_coding/audio_network_adaptor/debug_dump_pb2.py" to this folder. # The you can run this script with: # "python parse_ana_dump.py -m uplink_bandwidth_bps -f dump_file.dat" # You can add as may metrics or decisions to the plot as you like. # form more information call: # "python parse_ana_dump.py --help" import struct from optparse import OptionParser import matplotlib.pyplot as plt import debug_dump_pb2 def GetNextMessageSize(file_to_parse): data = file_to_parse.read(4) if data == '': return 0 return struct.unpack('<I', data)[0] def GetNextMessageFromFile(file_to_parse): message_size = GetNextMessageSize(file_to_parse) if message_size == 0: return None try: event = debug_dump_pb2.Event() event.ParseFromString(file_to_parse.read(message_size)) except IOError: print 'Invalid message in file' return None return event def InitMetrics(): metrics = {} event = debug_dump_pb2.Event() for metric in event.network_metrics.DESCRIPTOR.fields: metrics[metric.name] = {'time': [], 'value': []} return metrics def InitDecisions(): decisions = {} event = debug_dump_pb2.Event() for decision in event.encoder_runtime_config.DESCRIPTOR.fields: decisions[decision.name] = {'time': [], 'value': []} return decisions def ParseAnaDump(dump_file_to_parse): with open(dump_file_to_parse, 'rb') as file_to_parse: metrics = InitMetrics() decisions = InitDecisions() first_time_stamp = None while True: event = GetNextMessageFromFile(file_to_parse) if event == None: break if first_time_stamp == None: first_time_stamp = 
event.timestamp if event.type == debug_dump_pb2.Event.ENCODER_RUNTIME_CONFIG: for decision in event.encoder_runtime_config.DESCRIPTOR.fields: if event.encoder_runtime_config.HasField(decision.name): decisions[decision.name]['time'].append(event.timestamp - first_time_stamp) decisions[decision.name]['value'].append( getattr(event.encoder_runtime_config, decision.name)) if event.type == debug_dump_pb2.Event.NETWORK_METRICS: for metric in event.network_metrics.DESCRIPTOR.fields: if event.network_metrics.HasField(metric.name): metrics[metric.name]['time'].append(event.timestamp - first_time_stamp) metrics[metric.name]['value'].append( getattr(event.network_metrics, metric.name)) return (metrics, decisions) def main(): parser = OptionParser() parser.add_option( "-f", "--dump_file", dest="dump_file_to_parse", help="dump file to parse") parser.add_option( '-m', '--metric_plot', default=[], type=str, help='metric key (name of the metric) to plot', dest='metric_keys', action='append') parser.add_option( '-d', '--decision_plot', default=[], type=str, help='decision key (name of the decision) to plot', dest='decision_keys', action='append') options = parser.parse_args()[0] if options.dump_file_to_parse == None: print "No dump file to parse is set.\n" parser.print_help() exit() (metrics, decisions) = ParseAnaDump(options.dump_file_to_parse) metric_keys = options.metric_keys decision_keys = options.decision_keys plot_count = len(metric_keys) + len(decision_keys) if plot_count == 0: print "You have to set at least one metric or decision to plot.\n" parser.print_help() exit() plots = [] if plot_count == 1: f, mp_plot = plt.subplots() plots.append(mp_plot) else: f, mp_plots = plt.subplots(plot_count, sharex=True) plots.extend(mp_plots.tolist()) for key in metric_keys: plot = plots.pop() plot.grid(True) plot.set_title(key + " (metric)") plot.plot(metrics[key]['time'], metrics[key]['value']) for key in decision_keys: plot = plots.pop()
plot.grid(True) plot.set_title(key + " (decision)") plot.plot(decisions[key]['time'], decisions[key]['value']) f.subplots_adjust(hspace=0.3) plt.show() if
__name__ == "__main__": main()
# coding=utf-8
"""
Exercise: matrix statistics (original assignment statement in Portuguese).

a) Read the dimensions (number of rows and columns) of a 2-D matrix from
   standard input;
b) Generate a matrix where each cell is a random integer in [0, 9];
c) Print the matrix, row by row;
d) Compute and print the mean of all values in the matrix;
e) Print the contents of every row whose values are ALL above the mean
   computed in (d).

Input:  two positive integers, the number of rows L and columns C.
Output: L lines of C integers each (values 0..9); a blank line; one line
with a float (the mean); a blank line; then zero or more lines of C
integers each, for the rows with the property described in (e).

Note: numpy is intentionally not used (forbidden by the assignment).
"""
from random import randint


def gera_matriz(linhas, colunas):
    """Return a `linhas` x `colunas` matrix of random integers in [0, 9]."""
    return [[randint(0, 9) for _ in range(colunas)] for _ in range(linhas)]


def imprime_matriz(matriz):
    """Print the matrix one row per line, followed by a blank line."""
    for linha in matriz:
        for valor in linha:
            print(valor, end=" ")
        print()
    print()


def media_da_matriz(matriz):
    """Return the arithmetic mean of all values in the matrix (as float)."""
    total = 0.0
    for linha in matriz:
        total += sum(linha)
    return total / (len(matriz) * len(matriz[0]))


def imprive_valores_acima_da_media(matriz, media):
    """Print every row whose values are ALL strictly above the mean.

    Bug fix: the previous version printed each individual value above the
    mean, but item (e) of the assignment asks for the whole content of the
    rows in which *every* value is above the mean.

    (Function name — typo included — kept to preserve the interface.)
    """
    for linha in matriz:
        if all(valor > media for valor in linha):
            for valor in linha:
                print(valor, end=" ")
            print()


if __name__ == '__main__':
    # Guarding the script body keeps the helpers importable and testable
    # (previously, importing this module blocked on input()).
    quantidade_linhas, quantidade_colunas = input().split()
    matriz_gerada = gera_matriz(int(quantidade_linhas), int(quantidade_colunas))
    imprime_matriz(matriz_gerada)
    # Use a distinct name: the original rebound `media_da_matriz`,
    # shadowing the function.
    media = media_da_matriz(matriz_gerada)
    print(media)
    print()
    imprive_valores_acima_da_media(matriz_gerada, media)
#!/usr/bin/env python
# Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command-line application that demonstrates basic BigQuery API usage.

This sample queries a public shakespeare dataset and displays the 10 of
Shakespeare's works with the greatest number of distinct words.

This sample is used on this page:

    https://cloud.google.com/bigquery/bigquery-api-quickstart

For more information, see the README.md under /bigquery.
"""
# [START all]
import argparse

import googleapiclient.discovery
from googleapiclient.errors import HttpError


def main(project_id):
    """Run a sample query against the public shakespeare dataset and
    print one tab-separated line per result row."""
    # [START build_service]
    # Construct the service object for interacting with the BigQuery API.
    bigquery_service = googleapiclient.discovery.build('bigquery', 'v2')
    # [END build_service]

    try:
        # [START run_query]
        jobs = bigquery_service.jobs()
        request_body = {
            'query': (
                'SELECT TOP(corpus, 10) as title, '
                'COUNT(*) as unique_words '
                'FROM [publicdata:samples.shakespeare];')
        }
        query_response = jobs.query(
            projectId=project_id, body=request_body).execute()
        # [END run_query]

        # [START print_results]
        print('Query Results:')
        for row in query_response['rows']:
            cells = [field['v'] for field in row['f']]
            print('\t'.join(cells))
        # [END print_results]
    except HttpError as err:
        # Surface the API error body before propagating.
        print('Error: {}'.format(err.content))
        raise err


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('project_id', help='Your Google Cloud Project ID.')
    args = parser.parse_args()

    main(args.project_id)
# [END all]
"""
core.mixins - Mixins available to use with models
"""
from django.db.models.signals import post_save


def on_changed(sender, **kwargs):
    """
    post_save receiver: notify the instance of its changes, then take a
    fresh snapshot of its state.
    """
    instance = kwargs.get("instance")
    created = kwargs.get("created")
    changes = instance.get_dirty_fields()
    instance.model_changed(instance.original_state, changes, created)
    instance.original_state = instance.to_dict()


class ModelChangedMixin(object):
    """
    Mixin for detecting changes to a model
    """

    def __init__(self, *args, **kwargs):
        super(ModelChangedMixin, self).__init__(*args, **kwargs)
        # Snapshot the freshly-loaded state so later diffs have a baseline.
        self.original_state = self.to_dict()
        # A per-class dispatch_uid keeps the receiver from being connected
        # more than once for the same model class.
        uid = "{0}_model_changed".format(self.__class__.__name__)
        post_save.connect(on_changed, sender=self.__class__, dispatch_uid=uid)

    def to_dict(self):
        """
        Returns the model as a dict
        """
        # Only non-relational local fields are captured.
        return {
            f.name: getattr(self, f.name)
            for f in self._meta.local_fields
            if not f.rel
        }

    def get_dirty_fields(self):
        """
        Returns the fields dirty on the model
        """
        return {
            name: value
            for name, value in self.to_dict().items()
            if self.original_state[name] != value
        }

    def is_dirty(self):
        """
        Return whether the model is dirty

        An unsaved model is dirty when it has no primary key
        or has at least one dirty field.
        """
        return not self.pk or bool(self.get_dirty_fields())

    def model_changed(self, old_fields, new_fields, is_new):
        """
        Post-hook for all fields that have been changed.
        """
        raise NotImplementedError("Missing method `model_changed`")
# pyOCD debugger
# Copyright (c) 2006-2020 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import platform
import collections
from time import sleep

import six

from .interface import Interface
from .common import filter_device_by_usage_page
from ..dap_access_api import DAPAccessIntf
from ....utility.timeout import Timeout

# Maximum time to retry exclusive-open of the HID device (seconds).
OPEN_TIMEOUT_S = 60.0

LOG = logging.getLogger(__name__)

# pywinusb is Windows-only; on other platforms the import failure just marks
# this backend unavailable.
# NOTE(review): the bare `except:` also swallows unrelated import-time errors
# (e.g. KeyboardInterrupt) — consider narrowing to ImportError.
try:
    import pywinusb.hid as hid
except:
    if platform.system() == "Windows":
        LOG.error("PyWinUSB is required on a Windows Machine")
    IS_AVAILABLE = False
else:
    IS_AVAILABLE = True


class PyWinUSB(Interface):
    """! @brief CMSIS-DAP USB interface class using pyWinUSB for the backend.
    """

    isAvailable = IS_AVAILABLE

    def __init__(self):
        super(PyWinUSB, self).__init__()
        # Vendor page and usage_id = 2
        # HID output report used for writes; set by get_all_connected_interfaces().
        self.report = None
        # deque used here instead of synchronized Queue
        # since read speeds are ~10-30% faster and are
        # comparable to a list based implementation.
        self.rcv_data = collections.deque()
        # pywinusb device handle; set by get_all_connected_interfaces().
        self.device = None

    # handler called when a report is received
    def rx_handler(self, data):
        # LOG.debug("rcv<(%d) %s" % (len(data), ' '.join(['%02x' % i for i in data])))
        # data[0] is the HID report ID; only the payload is queued.
        self.rcv_data.append(data[1:])

    def open(self):
        self.device.set_raw_data_handler(self.rx_handler)

        # Attempt to open the device.
        # Note - this operation must be retried since
        # other instances of pyOCD listing board can prevent
        # opening this device with exclusive access.
        with Timeout(OPEN_TIMEOUT_S) as t_o:
            while t_o.check():
                # Attempt to open the device
                try:
                    self.device.open(shared=False)
                    break
                except hid.HIDError:
                    pass

                # Attempt to open the device in shared mode to make
                # sure it is still there
                try:
                    self.device.open(shared=True)
                    self.device.close()
                except hid.HIDError as exc:
                    # If the device could not be opened in read only mode
                    # Then it either has been disconnected or is in use
                    # by another thread/process
                    # NOTE(review): six.raise_from() itself raises, so the outer
                    # `raise` is redundant (though harmless) — verify intent.
                    raise six.raise_from(DAPAccessIntf.DeviceError("Unable to open device %s" % self.serial_number), exc)
            else:
                # If this timeout has elapsed then another process
                # has locked this device in shared mode. This should
                # not happen.
                raise DAPAccessIntf.DeviceError("timed out attempting to open device %s" % self.serial_number)

    @staticmethod
    def get_all_connected_interfaces():
        """! @brief Returns all the connected CMSIS-DAP devices
        """
        all_devices = hid.find_all_hid_devices()

        # find devices with good vid/pid
        all_mbed_devices = []
        for d in all_devices:
            if (d.product_name.find("CMSIS-DAP") >= 0):
                all_mbed_devices.append(d)

        boards = []
        for dev in all_mbed_devices:
            try:
                # Shared-mode open is enough to query capabilities/reports.
                dev.open(shared=True)

                # Perform device-specific filtering.
                if filter_device_by_usage_page(dev.vendor_id, dev.product_id, dev.hid_caps.usage_page):
                    dev.close()
                    continue

                # Exactly one output report is expected for a CMSIS-DAP device.
                report = dev.find_output_reports()
                if len(report) != 1:
                    dev.close()
                    continue

                new_board = PyWinUSB()
                new_board.report = report[0]
                # Raw report length includes the report-ID byte; subtract it.
                new_board.packet_size = len(new_board.report.get_raw_data()) - 1
                new_board.vendor_name = dev.vendor_name
                new_board.product_name = dev.product_name
                new_board.serial_number = dev.serial_number
                new_board.vid = dev.vendor_id
                new_board.pid = dev.product_id
                new_board.device = dev
                dev.close()
                boards.append(new_board)
            except Exception as e:
                if (str(e) != "Failure to get HID pre parsed data"):
                    LOG.error("Receiving Exception: %s", e)
                dev.close()

        return boards

    def write(self, data):
        """! @brief Write data on the OUT endpoint associated to the HID interface
        """
        # Pad to the full report size; prepend the report ID (0).
        data.extend([0] * (self.packet_size - len(data)))
        # LOG.debug("snd>(%d) %s" % (len(data), ' '.join(['%02x' % i for i in data])))
        self.report.send([0] + data)

    def read(self, timeout=20.0):
        """! @brief Read data on the IN endpoint associated to the HID interface
        """
        # Spin (yielding the GIL via sleep(0)) until rx_handler queues a packet.
        with Timeout(timeout) as t_o:
            while t_o.check():
                if len(self.rcv_data):
                    break
                sleep(0)
            else:
                # Read operations should typically take ~1-2ms.
                # If this exception occurs, then it could indicate
                # a problem in one of the following areas:
                # 1. Bad usb driver causing either a dropped read or write
                # 2. CMSIS-DAP firmware problem cause a dropped read or write
                # 3. CMSIS-DAP is performing a long operation or is being
                #    halted in a debugger
                raise DAPAccessIntf.DeviceError("Read timed out")

        return self.rcv_data.popleft()

    def close(self):
        """! @brief Close the interface
        """
        LOG.debug("closing interface")
        self.device.close()
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from msrest.serialization import Model


class ConnectionMonitorParameters(Model):
    """Parameters that define the operation to create a connection monitor.

    All required parameters must be populated in order to send to Azure.

    :param source: Required.
    :type source:
     ~azure.mgmt.network.v2018_01_01.models.ConnectionMonitorSource
    :param destination: Required.
    :type destination:
     ~azure.mgmt.network.v2018_01_01.models.ConnectionMonitorDestination
    :param auto_start: Determines if the connection monitor will start
     automatically once created. Default value: True .
    :type auto_start: bool
    :param monitoring_interval_in_seconds: Monitoring interval in seconds.
     Default value: 60 .
    :type monitoring_interval_in_seconds: int
    """

    # msrest validation rules: both endpoints are mandatory.
    _validation = {
        'source': {'required': True},
        'destination': {'required': True},
    }

    # Maps Python attribute names to wire (JSON) keys and msrest types.
    _attribute_map = {
        'source': {'key': 'source', 'type': 'ConnectionMonitorSource'},
        'destination': {'key': 'destination', 'type': 'ConnectionMonitorDestination'},
        'auto_start': {'key': 'autoStart', 'type': 'bool'},
        'monitoring_interval_in_seconds': {'key': 'monitoringIntervalInSeconds', 'type': 'int'},
    }

    def __init__(self, **kwargs):
        super(ConnectionMonitorParameters, self).__init__(**kwargs)
        self.source = kwargs.get('source', None)
        self.destination = kwargs.get('destination', None)
        # Defaults mirror the service-side defaults documented above.
        self.auto_start = kwargs.get('auto_start', True)
        self.monitoring_interval_in_seconds = kwargs.get('monitoring_interval_in_seconds', 60)
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys


def main():
    """Configure settings and dispatch the command line to Django."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'project.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Re-raise with a friendlier diagnostic while keeping the original
        # ImportError chained as the cause.
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
import os

import numpy as np
import pandas as pd
import pickle


# Return 0 or 1 based on whether Course fulfills a General Education Requirement
def lookupGenEd(cNum, college, data_dir="data"):
    """Return True if course number `cNum` fulfills a Gen Ed requirement.

    The set of qualifying course numbers is loaded from a pickle cache when
    present, otherwise rebuilt from the CSV listing and cached for next time.

    :param cNum: course number to look up (matches the CSV 'Dept' column).
    :param college: unused for now; kept for interface compatibility.
                    NOTE(review): presumably intended to select per-college
                    data files — confirm before wiring it in.
    :param data_dir: directory holding the CSV and pickle cache
                     (default "data", matching the original behavior).
    :return: True if cNum is in the Gen Ed set, else False.
    """
    # Bug fix: the original mixed '/' and '\\' separators, so the pickle
    # cache path only worked on Windows. Build both paths portably.
    fileName = os.path.join(data_dir, "Dietrich Gen Eds.csv")
    picklepath = os.path.join(data_dir, "dietrich_gen_eds.p")

    try:
        with open(picklepath, 'rb') as file:
            gen_eds = pickle.load(file)
    # Bug fix: narrowed from a bare `except:` — only a missing/unreadable or
    # corrupt cache should trigger a rebuild, not e.g. KeyboardInterrupt.
    except (OSError, pickle.UnpicklingError, EOFError):
        df = pd.read_csv(fileName, names=['Dept', 'Num', 'Title', '1', '2'])
        gen_eds = set(df['Dept'].values)
        with open(picklepath, 'wb') as file:
            pickle.dump(gen_eds, file)

    return cNum in gen_eds


'''
genEdubility = lookupGenEd(73100, "dietrich")
print("73100")
print('Is Gen Ed?:', genEdubility)
print()

genEdubility = lookupGenEd(70100, "tepper")
print("70100")
print('Is Gen Ed?:', genEdubility)
print()

genEdubility = lookupGenEd(15322, "scs")
print("15322")
print('Is Gen Ed?:', genEdubility)
print()
'''
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jun  7 21:16:09 2017

@author: immersinn
"""

import regex as re


def billsummaryaref_matcher(tag):
    # Matches the single anchor tag that links to the bill-summaries table.
    # NOTE(review): hasattr(tag, 'text') is always True for bs4 tags, so the
    # middle condition is redundant but harmless.
    return tag.name == 'a' and hasattr(tag, 'text') and tag.text == 'View Available Bill Summaries'


def extract_links(soup):
    """Extract Bill Text Links from Bill Page.

    Returns a list of dicts, one per table row, with keys 'html' (HTML
    version href), 'pdf' (PDF href), and 'label' (link text with
    non-breaking spaces normalized to plain spaces).
    """
    billtext_links = []
    target_a = soup.find_all(billsummaryaref_matcher)
    # Proceed only when the anchor is unambiguous.
    if len(target_a) == 1:
        target_a = target_a[0]
        # The summaries table is three levels above the anchor in the markup.
        content_table = target_a.parent.parent.parent
        # Skip the two header rows.
        for row in content_table.find_all('tr')[2:]:
            row_info = {}
            arefs = row.find_all('td')[0].find_all('a')
            for a in arefs:
                if a.text == 'HTML':
                    row_info['html'] = a['href']
                else:
                    # Replace UTF-8 non-breaking spaces (0xC2 0xA0) with
                    # ordinary spaces in the label.
                    row_info['label'] = a.text.encode('utf8').replace(b'\xc2\xa0', b' ').decode('utf8')
                    row_info['pdf'] = a['href']
            billtext_links.append(row_info)
    return billtext_links


def extract_meta(soup):
    """Extract selected metadata from the Bill Info Page.

    Pulls Sponsors, Counties, Statutes, and Keywords out of the info table.
    Sponsors become a list of {'userid', 'chamber'} dicts parsed from link
    hrefs; the other fields become lists of strings, or None when the page
    carries a "none cited" placeholder.
    """
    # Lookbehind regexes pull the values out of query-string parameters.
    chamber_re = re.compile(r"(?:(?<=Chamber=))(H|S)")
    userid_re = re.compile(r"(?:(?<=UserID=))([0-9]+)")

    meta = {}
    for kw in ["Sponsors", "Counties", "Statutes", "Keywords"]:
        # Row whose header cell is e.g. "Sponsors:".
        tr = soup.find('th', text=kw + ':').parent
        content = tr.find('td')
        if kw == 'Sponsors':
            spons = content.find_all('a')
            spons_list = []
            for a in spons:
                hr = a['href']
                spons_list.append({'userid': userid_re.findall(hr)[0],
                                   'chamber': chamber_re.findall(hr)[0]})
            meta[kw] = spons_list
        elif kw in ['Counties', 'Keywords', 'Statutes']:
            meta[kw] = content.text.split(', ')
        else:
            meta[kw] = content.text
        # Normalize "nothing cited" placeholders to None.
        if kw == 'Counties' and \
           meta[kw][0].lower().strip() == 'no counties specifically cited':
            meta[kw] = None
        if kw == 'Statutes' and \
           meta[kw][0].lower().strip() == 'no affected general statutes':
            meta[kw] = None
    return meta
#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#

import inspect
import os
import sys
import traceback

from time import sleep

# Make the extension's main and bundled-transitions directories importable.
scriptdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
maindir = os.path.abspath(os.path.join(scriptdir, '../../'))
sys.path.append(maindir)
transitionsdir = os.path.abspath(os.path.join(scriptdir, '../../transitions'))
sys.path.append(transitionsdir)

from oscrypto import *
from encryptstates import *
from Common import *
from CommandExecutor import *
from DiskUtil import *
from transitions import *


class Ubuntu1604EncryptionStateMachine(OSEncryptionStateMachine):
    """State machine driving full-OS-volume encryption on Ubuntu 16.04.

    States progress linearly: prereq -> stripdown -> unmount_oldroot ->
    split_root_partition -> encrypt_block_device -> patch_boot_system ->
    completed; `skip_encryption` short-circuits straight to completed when
    the volume is already encrypted.
    """

    states = [
        State(name='uninitialized'),
        State(name='prereq', on_enter='on_enter_state'),
        State(name='stripdown', on_enter='on_enter_state'),
        State(name='unmount_oldroot', on_enter='on_enter_state'),
        State(name='split_root_partition', on_enter='on_enter_state'),
        State(name='encrypt_block_device', on_enter='on_enter_state'),
        State(name='patch_boot_system', on_enter='on_enter_state'),
        State(name='completed'),
    ]

    # Each forward transition is gated by `should_exit_previous_state`;
    # `retry_unmount_oldroot` is a self-loop with no gate so it can be retried.
    transitions = [
        {
            'trigger': 'skip_encryption',
            'source': 'uninitialized',
            'dest': 'completed'
        },
        {
            'trigger': 'enter_prereq',
            'source': 'uninitialized',
            'dest': 'prereq'
        },
        {
            'trigger': 'enter_stripdown',
            'source': 'prereq',
            'dest': 'stripdown',
            'before': 'on_enter_state',
            'conditions': 'should_exit_previous_state'
        },
        {
            'trigger': 'enter_unmount_oldroot',
            'source': 'stripdown',
            'dest': 'unmount_oldroot',
            'before': 'on_enter_state',
            'conditions': 'should_exit_previous_state'
        },
        {
            'trigger': 'retry_unmount_oldroot',
            'source': 'unmount_oldroot',
            'dest': 'unmount_oldroot',
            'before': 'on_enter_state'
        },
        {
            'trigger': 'enter_split_root_partition',
            'source': 'unmount_oldroot',
            'dest': 'split_root_partition',
            'before': 'on_enter_state',
            'conditions': 'should_exit_previous_state'
        },
        {
            'trigger': 'enter_encrypt_block_device',
            'source': 'split_root_partition',
            'dest': 'encrypt_block_device',
            'before': 'on_enter_state',
            'conditions': 'should_exit_previous_state'
        },
        {
            'trigger': 'enter_patch_boot_system',
            'source': 'encrypt_block_device',
            'dest': 'patch_boot_system',
            'before': 'on_enter_state',
            'conditions': 'should_exit_previous_state'
        },
        {
            'trigger': 'stop_machine',
            'source': 'patch_boot_system',
            'dest': 'completed',
            'conditions': 'should_exit_previous_state'
        },
    ]

    def on_enter_state(self):
        # Delegates to the base class; kept as an override point.
        super(Ubuntu1604EncryptionStateMachine, self).on_enter_state()

    def should_exit_previous_state(self):
        # when this is called, self.state is still the "source" state in the transition
        return super(Ubuntu1604EncryptionStateMachine, self).should_exit_previous_state()

    def __init__(self, hutil, distro_patcher, logger, encryption_environment):
        super(Ubuntu1604EncryptionStateMachine, self).__init__(hutil, distro_patcher, logger, encryption_environment)

        # One state object per named state; each receives the shared context.
        self.state_objs = {
            'prereq': PrereqState(self.context),
            'stripdown': StripdownState(self.context),
            'unmount_oldroot': UnmountOldrootState(self.context),
            'split_root_partition': SplitRootPartitionState(self.context),
            'encrypt_block_device': EncryptBlockDeviceState(self.context),
            'patch_boot_system': PatchBootSystemState(self.context),
        }

        self.state_machine = Machine(model=self,
                                     states=Ubuntu1604EncryptionStateMachine.states,
                                     transitions=Ubuntu1604EncryptionStateMachine.transitions,
                                     initial='uninitialized')

    def start_encryption(self):
        """Drive the machine through the full encryption sequence.

        Skips everything when the OS volume is already mapped as encrypted;
        retries the /oldroot unmount up to 10 times; ends with a reboot.
        """
        proc_comm = ProcessCommunicator()
        self.command_executor.Execute(command_to_execute="mount",
                                      raise_exception_on_failure=True,
                                      communicator=proc_comm)

        # Already-encrypted volumes expose the osencrypt device mapper node.
        if '/dev/mapper/osencrypt' in proc_comm.stdout:
            self.logger.log("OS volume is already encrypted")
            self.skip_encryption()
            self.log_machine_state()
            return

        self.log_machine_state()

        self.enter_prereq()
        self.log_machine_state()

        self.enter_stripdown()
        self.log_machine_state()

        # Unmounting /oldroot is flaky (processes may still hold it open),
        # so retry with a 10 s pause between attempts, up to 10 attempts.
        oldroot_unmounted_successfully = False
        attempt = 1

        while not oldroot_unmounted_successfully:
            self.logger.log("Attempt #{0} to unmount /oldroot".format(attempt))

            try:
                if attempt == 1:
                    self.enter_unmount_oldroot()
                elif attempt > 10:
                    raise Exception("Could not unmount /oldroot in 10 attempts")
                else:
                    self.retry_unmount_oldroot()

                self.log_machine_state()
            except Exception as e:
                message = "Attempt #{0} to unmount /oldroot failed with error: {1}, stack trace: {2}".format(attempt, e, traceback.format_exc())
                self.logger.log(msg=message)
                self.hutil.do_status_report(operation='EnableEncryptionOSVolume',
                                            status=CommonVariables.extension_error_status,
                                            status_code=str(CommonVariables.unmount_oldroot_error),
                                            message=message)
                sleep(10)

                if attempt > 10:
                    raise Exception(message)
            else:
                oldroot_unmounted_successfully = True
            finally:
                attempt += 1

        self.enter_split_root_partition()
        self.log_machine_state()

        self.enter_encrypt_block_device()
        self.log_machine_state()

        self.enter_patch_boot_system()
        self.log_machine_state()

        self.stop_machine()
        self.log_machine_state()

        self._reboot()
#!/usr/bin/python
# -*- coding: utf-8 -*-

# (c) 2016, Cumulus Networks <ce-ceng@cumulusnetworks.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type

# This module is deprecated and removed in Ansible 2.5; invoking it only
# reports the removal (see `removed_module()` at the bottom).
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['deprecated'],
                    'supported_by': 'community'}

DOCUMENTATION = '''
---
module: cl_ports
version_added: "2.1"
author: "Cumulus Networks (@CumulusNetworks)"
short_description: Configure Cumulus Switch port attributes (ports.conf)
deprecated:
  removed_in: "2.5"
  why: The M(nclu) module is designed to be easier to use for individuals who are new to Cumulus Linux by exposing the NCLU interface in an automatable way.
  alternative: Use M(nclu) instead.
description:
    - Set the initial port attribute defined in the Cumulus Linux ports.conf,
      file. This module does not do any error checking at the moment. Be careful
      to not include ports that do not exist on the switch. Carefully read the
      original ports.conf file for any exceptions or limitations. For more
      details go the Configure Switch Port Attribute Documentation at
      U(http://docs.cumulusnetworks.com).
options:
    speed_10g:
        description:
            - List of ports to run initial run at 10G.
    speed_40g:
        description:
            - List of ports to run initial run at 40G.
    speed_4_by_10g:
        description:
            - List of 40G ports that will be unganged to run as 4 10G ports.
    speed_40g_div_4:
        description:
            - List of 10G ports that will be ganged to form a 40G port.
'''
EXAMPLES = '''
# Use cl_ports module to manage the switch attributes defined in the
# ports.conf file on Cumulus Linux

## Unganged port configuration on certain ports
- name: configure ports.conf setup
  cl_ports:
    speed_4_by_10g:
      - swp1
      - swp32
    speed_40g:
      - swp2-31

## Unganged port configuration on certain ports
- name: configure ports.conf setup
  cl_ports:
    speed_4_by_10g:
      - swp1-3
      - swp6
    speed_40g:
      - swp4-5
      - swp7-32
'''

RETURN = '''
changed:
    description: whether the interface was changed
    returned: changed
    type: bool
    sample: True
msg:
    description: human-readable report of success or failure
    returned: always
    type: string
    sample: "interface bond0 config updated"
'''

from ansible.module_utils.common.removed import removed_module


if __name__ == '__main__':
    # Emits the standard "module removed" failure for users still calling it.
    removed_module()
import numpy as np


def gauss(win, sigma):
    """Return a ``win`` x ``win`` 2-D Gaussian kernel centered at win // 2.

    Computes g(x, y) = 1 / (2*pi*sigma**2) * exp(-((x-x0)**2 + (y-y0)**2) / (2*sigma**2)).

    :param win: side length of the square kernel (int).
    :param sigma: standard deviation of the Gaussian.
    :return: ndarray of shape (win, win); peak value 1/(2*pi*sigma**2) at the center.
    """
    x = np.arange(0, win, 1, float)
    # Column vector so (x, y) broadcasting builds the full 2-D grid.
    y = x[:, np.newaxis]
    x0 = y0 = win // 2
    # Bug fix: the original computed exp(+r**2 / 2 * sigma**2) — the exponent
    # was positive (kernel grew away from the center) and the `/2*sigma**2`
    # precedence multiplied by sigma**2 instead of dividing by 2*sigma**2.
    g = 1.0 / (2 * np.pi * sigma ** 2) * np.exp(-((x - x0) ** 2 + (y - y0) ** 2) / (2 * sigma ** 2))
    return g
def gaussx(win, sigma):
    """Return a ``win`` x ``win`` x-oriented Gaussian-derivative kernel.

    Computes (x-x0) / (2*pi*sigma**4) * exp(-((x-x0)**2 + (y-y0)**2) / (2*sigma**2)),
    i.e. the magnitude profile of dG/dx. NOTE(review): the analytic derivative
    carries a leading minus sign; the positive (x-x0) prefactor is kept here to
    preserve the original orientation convention — confirm against callers.

    :param win: side length of the square kernel (int).
    :param sigma: standard deviation of the underlying Gaussian.
    :return: ndarray of shape (win, win), odd (antisymmetric) along x.
    """
    x = np.arange(0, win, 1, float)
    y = x[:, np.newaxis]
    x0 = y0 = win // 2
    # Bug fix: as in gauss(), the exponent must be negative and divided by
    # 2*sigma**2; the original's `/2*sigma**2` multiplied by sigma**2.
    gx = (x - x0) / (2 * np.pi * sigma ** 4) * np.exp(-((x - x0) ** 2 + (y - y0) ** 2) / (2 * sigma ** 2))
    return gx


def gaussy(win, sigma):
    """Return a ``win`` x ``win`` y-oriented Gaussian-derivative kernel.

    Same as gaussx() with the (y-y0) prefactor; see the sign-convention note there.

    :param win: side length of the square kernel (int).
    :param sigma: standard deviation of the underlying Gaussian.
    :return: ndarray of shape (win, win), odd (antisymmetric) along y.
    """
    x = np.arange(0, win, 1, float)
    y = x[:, np.newaxis]
    x0 = y0 = win // 2
    # Same exponent fix as gauss()/gaussx().
    gy = (y - y0) / (2 * np.pi * sigma ** 4) * np.exp(-((x - x0) ** 2 + (y - y0) ** 2) / (2 * sigma ** 2))
    return gy
        # NOTE(review): truncated tail of the preceding test (presumably
        # test_mul, given the `x_val * y_val` assertion); its start is outside
        # this view.
        self._solver.add(x > 1)
        self._solver.add(y > 1)
        self._solver.add(x != y)

        self.assertEqual(self._solver.check(), "sat")

        x_val = self._solver.get_value(x)
        y_val = self._solver.get_value(y)
        z_val = self._solver.get_value(z)

        # Product is reduced modulo 2**32 to match 32-bit bitvector wrap-around.
        self.assertTrue((x_val * y_val) & 0xffffffff == z_val)

    def test_div(self):
        # Declare three 32-bit bitvectors and constrain x / y == z.
        x = BitVec(32, "x")
        y = BitVec(32, "y")
        z = BitVec(32, "z")

        self._solver.declare_fun("x", x)
        self._solver.declare_fun("y", y)
        self._solver.declare_fun("z", z)

        self._solver.add(x / y == z)

        # Add constraints to avoid trivial solutions.
        self._solver.add(x > 1)
        self._solver.add(y > 1)
        self._solver.add(x != y)

        self.assertEqual(self._solver.check(), "sat")

        x_val = self._solver.get_value(x)
        y_val = self._solver.get_value(y)
        z_val = self._solver.get_value(z)

        self.assertTrue(x_val / y_val == z_val)

    def test_mod(self):
        x = BitVec(32, "x")
        y = BitVec(32, "y")
        z = BitVec(32, "z")

        self._solver.declare_fun("x", x)
        self._solver.declare_fun("y", y)
        self._solver.declare_fun("z", z)

        self._solver.add(x % y == z)

        # Add constraints to avoid trivial solutions.
        self._solver.add(x > 1)
        self._solver.add(y > 1)
        self._solver.add(x != y)

        self.assertEqual(self._solver.check(), "sat")

        x_val = self._solver.get_value(x)
        y_val = self._solver.get_value(y)
        z_val = self._solver.get_value(z)

        self.assertTrue(x_val % y_val == z_val)

    def test_neg(self):
        x = BitVec(32, "x")
        z = BitVec(32, "z")

        self._solver.declare_fun("x", x)
        self._solver.declare_fun("z", z)

        self._solver.add(-x == z)

        # Add constraints to avoid trivial solutions.
        self._solver.add(x > 1)

        self.assertEqual(self._solver.check(), "sat")

        x_val = self._solver.get_value(x)
        z_val = self._solver.get_value(z)

        # Two's-complement negation, masked to 32 bits.
        self.assertTrue(-x_val & 0xffffffff == z_val)

    # Bitwise operations.
    def test_and(self):
        x = BitVec(32, "x")
        y = BitVec(32, "y")
        z = BitVec(32, "z")

        self._solver.declare_fun("x", x)
        self._solver.declare_fun("y", y)
        self._solver.declare_fun("z", z)

        self._solver.add(x & y == z)

        # Add constraints to avoid trivial solutions.
        self._solver.add(x > 1)
        self._solver.add(y > 1)
        self._solver.add(x != y)

        self.assertEqual(self._solver.check(), "sat")

        x_val = self._solver.get_value(x)
        y_val = self._solver.get_value(y)
        z_val = self._solver.get_value(z)

        self.assertTrue(x_val & y_val == z_val)

    def test_xor(self):
        x = BitVec(32, "x")
        y = BitVec(32, "y")
        z = BitVec(32, "z")

        self._solver.declare_fun("x", x)
        self._solver.declare_fun("y", y)
        self._solver.declare_fun("z", z)

        self._solver.add(x ^ y == z)

        # Add constraints to avoid trivial solutions.
        self._solver.add(x > 1)
        self._solver.add(y > 1)
        self._solver.add(x != y)

        self.assertEqual(self._solver.check(), "sat")

        x_val = self._solver.get_value(x)
        y_val = self._solver.get_value(y)
        z_val = self._solver.get_value(z)

        self.assertTrue(x_val ^ y_val == z_val)

    def test_or(self):
        x = BitVec(32, "x")
        y = BitVec(32, "y")
        z = BitVec(32, "z")

        self._solver.declare_fun("x", x)
        self._solver.declare_fun("y", y)
        self._solver.declare_fun("z", z)

        self._solver.add(x | y == z)

        # Add constraints to avoid trivial solutions.
        self._solver.add(x > 1)
        self._solver.add(y > 1)
        self._solver.add(x != y)

        self.assertEqual(self._solver.check(), "sat")

        x_val = self._solver.get_value(x)
        y_val = self._solver.get_value(y)
        z_val = self._solver.get_value(z)

        self.assertTrue(x_val | y_val == z_val)

    def test_lshift(self):
        x = BitVec(32, "x")
        y = BitVec(32, "y")
        z = BitVec(32, "z")

        self._solver.declare_fun("x", x)
        self._solver.declare_fun("y", y)
        self._solver.declare_fun("z", z)

        self._solver.add(x << y == z)

        # Add constraints to avoid trivial solutions.
        self._solver.add(x > 1)
        self._solver.add(y > 1)
        self._solver.add(x != y)

        self.assertEqual(self._solver.check(), "sat")

        x_val = self._solver.get_value(x)
        y_val = self._solver.get_value(y)
        z_val = self._solver.get_value(z)

        # Shift result wraps at 32 bits, hence the mask.
        self.assertTrue((x_val << y_val) & 0xffffffff == z_val)

    def test_rshift(self):
        x = BitVec(32, "x")
        y = BitVec(32, "y")
        z = BitVec(32, "z")

        self._solver.declare_fun("x", x)
        self._solver.declare_fun("y", y)
        self._solver.declare_fun("z", z)

        self._solver.add(x >> y == z)

        # Add constraints to avoid trivial solutions.
        self._solver.add(x > 1)
        self._solver.add(y > 1)
        self._solver.add(x != y)

        self.assertEqual(self._solver.check(), "sat")

        x_val = self._solver.get_value(x)
        y_val = self._solver.get_value(y)
        z_val = self._solver.get_value(z)

        self.assertTrue(x_val >> y_val == z_val)

    def test_invert(self):
        # NOTE(review): y/y_val are declared and fetched but never asserted
        # against — likely copy-paste residue from the binary-op tests.
        x = BitVec(32, "x")
        y = BitVec(32, "y")
        z = BitVec(32, "z")

        self._solver.declare_fun("x", x)
        self._solver.declare_fun("y", y)
        self._solver.declare_fun("z", z)

        self._solver.add(~x == z)

        # Add constraints to avoid trivial solutions.
        self._solver.add(x > 1)

        self.assertEqual(self._solver.check(), "sat")

        x_val = self._solver.get_value(x)
        y_val = self._solver.get_value(y)
        z_val = self._solver.get_value(z)

        self.assertTrue(~x_val & 0xffffffff == z_val)

    # Comparison operators (signed)
    def test_lt(self):
        x = BitVec(32, "x")
        y = BitVec(32, "y")

        self._solver.declare_fun("x", x)
        self._solver.declare_fun("y", y)

        self._solver.add(x < y)

        # Add constraints to avoid trivial solutions.
        self._solver.add(x > 1)
        self._solver.add(y > 1)

        self.assertEqual(self._solver.check(), "sat")

        x_val = self._solver.get_value(x)
        y_val = self._solver.get_value(y)

        self.assertTrue(x_val < y_val)

    def test_le(self):
        x = BitVec(32, "x")
        y = BitVec(32, "y")

        self._solver.declare_fun("x", x)
        self._solver.declare_fun("y", y)

        self._solver.add(x <= y)

        # Add constraints to avoid trivial solutions.
        self._solver.add(x > 1)
        self._solver.add(y > 1)

        self.assertEqual(self._solver.check(), "sat")

        x_val = self._solver.get_value(x)
        y_val = self._solver.get_value(y)

        self.assertTrue(x_val <= y_val)

    def test_eq(self):
        x = BitVec(32, "x")
        y = BitVec(32, "y")

        self._solver.declare_fun("x", x)
        self._solver.declare_fun("y", y)

        self._solver.add(x == y)

        # Add constraints to avoid trivial solutions.
        self._solver.add(x > 1)
        self._solver.add(y > 1)

        self.assertEqual(self._solver.check(), "sat")

        x_val = self._solver.get_value(x)
        y_val = self._solver.get_value(y)

        self.assertTrue(x_val == y_val)

    def test_neq(self):
        x = BitVec(32, "x")
        y = BitVec(32, "y")

        self._solver.declare_fun("x", x)
        self._solver.declare_fun("y", y)

        self._solver.add(x != y)

        # Add constraints to avoid trivial solutions.
        self._solver.add(x > 1)
        self._solver.add(y > 1)

        self.assertEqual(self._solver.check(), "sat")

        x_val = self._solver.get_value(x)
        y_val = self._solver.get_value(y)

        self.assertTrue(x_val != y_val)

    def test_gt(self):
        x = BitVec(32, "x")
        y = BitVec(32, "y")

        # NOTE(review): truncated here — the remainder of test_gt is outside
        # this view.
        self._solver.declare_fun("x"
#!/usr/bin/env python
###############################################################################
# $Id: sgi.py 31335 2015-11-04 00:17:39Z goatbar $
#
# Project:  GDAL/OGR Test Suite
# Purpose:  PNM (Portable Anyware Map) Testing.
# Author:   Frank Warmerdam <warmerdam@pobox.com>
#
###############################################################################
# Copyright (c) 2007, Frank Warmerdam <warmerdam@pobox.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################

import sys

sys.path.append( '../pymod' )

import gdaltest

###############################################################################
# Read existing simple 1 band SGI file.


def sgi_1():
    # testOpen verifies band 1 of byte.sgi matches checksum 4672.
    tst = gdaltest.GDALTest( 'SGI', 'byte.sgi', 1, 4672 )

    return tst.testOpen()

###############################################################################
# Write Test grayscale


def sgi_2():
    tst = gdaltest.GDALTest( 'SGI', 'byte.tif', 1, 4672 )

    return tst.testCreate()

###############################################################################
# Write Test rgb


def sgi_3():
    # Band 2 of rgbsmall.tif, expected checksum 21053.
    tst = gdaltest.GDALTest( 'SGI', 'rgbsmall.tif', 2, 21053 )

    return tst.testCreate()


# Registry of tests executed by the gdaltest harness below.
gdaltest_list = [
    sgi_1,
    sgi_2,
    sgi_3,
    ]

if __name__ == '__main__':

    gdaltest.setup_run( 'SGI' )

    gdaltest.run_tests( gdaltest_list )

    gdaltest.summarize()
import json
import os

import avasdk
from zipfile import ZipFile, BadZipFile
from avasdk.plugins.manifest import validate_manifest
from avasdk.plugins.hasher import hash_plugin
from django import forms
from django.core.validators import ValidationError

from .validators import ZipArchiveValidator


class PluginArchiveField(forms.FileField):
    """File field that accepts a plugin .zip and extracts its metadata.

    clean() returns a dict with the uploaded file, the parsed/validated
    manifest, the README contents (or None), and a checksum of the archive.
    """

    default_validators = [ZipArchiveValidator()]
    label = 'Plugin .zip'

    def get_prefix(self, archive):
        """Return the common leading directory of all archive members ('' if flat)."""
        files = archive.namelist()
        return os.path.commonpath(files)

    def get_manifest(self, archive):
        """Load and validate manifest.json from the archive.

        Raises ValidationError with a user-readable message for every
        failure mode (bad zip, missing file, bad JSON, invalid manifest).
        """
        try:
            with ZipFile(archive.temporary_file_path()) as plugin:
                prefix = self.get_prefix(plugin)
                # Zip member paths never start with '/', so a flat archive
                # uses an empty prefix rather than '/'.
                prefix = prefix + '/' if len(prefix) else ''
                with plugin.open('{}manifest.json'.format(prefix)) as myfile:
                    manifest = json.loads(myfile.read())
                    validate_manifest(manifest)
                    return manifest
        except BadZipFile:
            raise ValidationError('Bad .zip format')
        except FileNotFoundError:
            raise ValidationError('Error with upload, please try again')
        except KeyError:
            raise ValidationError('No manifest.json found in archive')
        except json.JSONDecodeError:
            raise ValidationError('Error with manifest.json, bad Json Format')
        except avasdk.exceptions.ValidationError as e:
            raise ValidationError('Error in manifest.json ({})'.format(e))

    def get_readme(self, archive):
        """Return the README.md bytes from the archive, or None if absent."""
        try:
            with ZipFile(archive.temporary_file_path()) as plugin:
                prefix = self.get_prefix(plugin)
                prefix = prefix + '/' if len(prefix) else ''
                # Bug fix: was '{}/README.md', which produced '/README.md'
                # for flat archives and 'pkg//README.md' otherwise — the
                # member was never found and the README was silently dropped.
                with plugin.open('{}README.md'.format(prefix)) as myfile:
                    readme = myfile.read()
                    return readme
        except FileNotFoundError:
            raise ValidationError('Error with upload, please try again')
        except KeyError:
            # README is optional.
            return None

    def clean(self, data, initial=None):
        f = super().clean(data, initial)
        manifest = self.get_manifest(f)
        readme = self.get_readme(f)
        return {
            'zipfile': f,
            'manifest': manifest,
            'readme': readme,
            'checksum': hash_plugin(f.temporary_file_path()),
        }


class UploadPluginForm(forms.Form):
    """Upload form exposing a single plugin-archive field."""

    archive = PluginArchiveField()
#author: Tobias Andermann, tobias.andermann@bioenv.gu.se

import os
import sys
import re
import glob
import shutil
import argparse
from Bio import SeqIO

from .utils import CompletePath


# Get arguments
def get_args():
    """Build and parse the command-line arguments for this script."""
    parser = argparse.ArgumentParser(
        description="Set the maximum fraction of missing data that you want to allow in an alignment and drop all sequences above this threshold.",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    parser.add_argument(
        '--alignment',
        required=True,
        action=CompletePath,
        default=None,
        help='The alignment in fasta format.'
    )
    parser.add_argument(
        '--maximum_missing',
        type=float,
        default=0.8,
        help='Define the maximal fraction of missing data that you want to allow. All sequences below this threshold will be exported into a new alignment.'
    )
    parser.add_argument(
        '--output',
        required=True,
        action=CompletePath,
        default=None,
        help='The output directory where results will be safed.'
    )
    return parser.parse_args()


args = get_args()
# Set working directory
out_dir = args.output
if not os.path.exists(out_dir):
    os.makedirs(out_dir)
# Get other input variables
alignment = args.alignment
max_mis = args.maximum_missing


def manage_homzygous_samples(fasta, threshold, output):
    """Write sequences whose missing-data fraction is <= threshold to a new fasta.

    A character counts as "missing" when it is not an unambiguous base
    (A/C/T/G, either case). Dropped sequences are reported on stdout.

    :param fasta: path to the input fasta alignment.
    :param threshold: maximal allowed fraction of missing data per sequence.
    :param output: directory the cleaned alignment is written to.
    """
    outpath = '%s/cleaned_alignment_all_sequences_less_than_%f_missing_data.fasta' % (output, threshold)
    # Bug fix: the input handle was opened via open(fasta) and never closed,
    # and the original ended with a bare `outfile.close` (attribute access,
    # not a call — a no-op). Both handles are now managed by `with`.
    with open(fasta) as infile, open(outpath, 'w') as outfile:
        fasta_alignment = SeqIO.parse(infile, 'fasta')
        final_seqs = {}
        for sample in fasta_alignment:
            header = sample.description
            sequence = sample.seq
            chars = list(sequence)
            bad_chars = []
            for char in chars:
                if char not in ['A', 'C', 'T', 'G', 'a', 'c', 't', 'g']:
                    bad_chars.append(char)
            sequence_length = float(len(chars))
            count_bad_chars = float(len(bad_chars))
            fraction = float(count_bad_chars / sequence_length)
            if fraction <= threshold:
                final_seqs.setdefault(header, []).append(sequence)
            else:
                print("Dropped sequence for", header)
        for seqname, seq in final_seqs.items():
            sequence = str(seq[0])
            outfile.write(">" + seqname + "\n")
            outfile.write(sequence + "\n")


manage_homzygous_samples(alignment, max_mis, out_dir)
# Copyright (C)2016 D. Plaindoux.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2, or (at your option) any
# later version.

import unittest

import path_parse_test
import path_match_test
import provider_test
import verb_test
import mime_test
import inspection_test
import wsgi_test

if __name__ == '__main__':
    # Aggregate every sub-suite into one run; each *_test module exposes a
    # suite() factory. Note the order here differs from the import order
    # (verb/mime before provider) — kept as-is.
    suite = unittest.TestSuite()
    suite.addTest(path_parse_test.suite())
    suite.addTest(path_match_test.suite())
    suite.addTest(verb_test.suite())
    suite.addTest(mime_test.suite())
    suite.addTest(provider_test.suite())
    suite.addTest(inspection_test.suite())
    suite.addTest(wsgi_test.suite())
    unittest.TextTestRunner(verbosity=2).run(suite)
from django.apps import AppConfig


class QsiteConfig(AppConfig):
    """App configuration for the qsite application."""

    # Dotted module path Django uses to locate the app.
    name = 'qsite'
    # Human-readable name shown in the admin ("Site management").
    verbose_name = '站点管理'
import json

from httpretty import HTTPretty

from social.p3 import urlencode
from social.tests.backends.oauth import OAuth1Test


class YahooOAuth1Test(OAuth1Test):
    """OAuth1 login tests for the Yahoo backend using canned HTTP fixtures."""

    # Dotted path of the backend under test.
    backend_path = 'social.backends.yahoo.YahooOAuth'
    # Profile endpoint for the fixture GUID 'a-guid'.
    user_data_url = 'https://social.yahooapis.com/v1/user/a-guid/profile?' \
        'format=json'
    expected_username = 'foobar'
    # Canned access-token response body.
    access_token_body = json.dumps({
        'access_token': 'foobar',
        'token_type': 'bearer'
    })
    # Canned request-token response (urlencoded, as Yahoo returns it).
    request_token_body = urlencode({
        'oauth_token_secret': 'foobar-secret',
        'oauth_token': 'foobar',
        'oauth_callback_confirmed': 'true'
    })
    # Canned "me/guid" response mapping the session to a user GUID.
    guid_body = json.dumps({
        'guid': {
            'uri': 'https://social.yahooapis.com/v1/me/guid',
            'value': 'a-guid'
        }
    })
    # Canned profile payload returned for 'a-guid'.
    user_data_body = json.dumps({
        'profile': {
            'bdRestricted': True,
            'memberSince': '2007-12-11T14:40:30Z',
            'image': {
                'width': 192,
                'imageUrl': 'http://l.yimg.com/dh/ap/social/profile/'
                            'profile_b192.png',
                'size': '192x192',
                'height': 192
            },
            'created': '2013-03-18T04:15:08Z',
            'uri': 'https://social.yahooapis.com/v1/user/a-guid/profile',
            'isConnected': False,
            'profileUrl': 'http://profile.yahoo.com/a-guid',
            'guid': 'a-guid',
            'nickname': 'foobar'
        }
    })

    def test_login(self):
        # Register the GUID endpoint so the backend can resolve the user id,
        # then exercise the standard OAuth1 login flow.
        HTTPretty.register_uri(
            HTTPretty.GET,
            'https://social.yahooapis.com/v1/me/guid?format=json',
            status=200,
            body=self.guid_body
        )
        self.do_login()

    def test_partial_pipeline(self):
        # Exercise the partial-pipeline variant of the login flow.
        self.do_partial_pipeline()
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

import six
import unittest2

from robottelo.ui.location import Location
from robottelo.ui.locators import common_locators
from robottelo.ui.locators import locators

if six.PY2:
    import mock
else:
    from unittest import mock


class LocationTestCase(unittest2.TestCase):
    """Unit tests for ``Location.create()`` with all UI interaction mocked."""

    def test_creation_without_parent_and_without_unassigned_host(self):
        # Without a parent: expect 3 clicks (new, submit, submit), no
        # parent selection, and default configuration kwargs.
        location = Location(None)
        location.click = mock.Mock()
        location.assign_value = mock.Mock()
        # Returning None simulates "no unassigned-host element appeared".
        location.wait_until_element = mock.Mock(return_value=None)
        location._configure_location = mock.Mock()
        location.select = mock.Mock()
        location.create('foo')
        click_calls = [
            mock.call(locators['location.new']),
            mock.call(common_locators['submit']),
            mock.call(common_locators['submit'])
        ]
        self.assertEqual(3, location.click.call_count)
        location.click.assert_has_calls(click_calls, any_order=False)
        location.assign_value.assert_called_once_with(
            locators['location.name'], 'foo')
        # not called if parent is None
        location.select.assert_not_called()
        location._configure_location.assert_called_once_with(
            capsules=None, all_capsules=None, domains=None, envs=None,
            hostgroups=None, medias=None, organizations=None, ptables=None,
            resources=None, select=True, subnets=None, templates=None,
            users=None, params=None
        )

    def test_creation_with_parent_and_unassigned_host(self):
        # With a parent: expect 4 clicks (including proceed_to_edit), the
        # parent selected, and every configuration kwarg forwarded verbatim.
        location = Location(None)
        location.click = mock.Mock()
        location.assign_value = mock.Mock()
        location.wait_until_element = mock.Mock()
        location._configure_location = mock.Mock()
        location.select = mock.Mock()
        # NOTE(review): 'select' appears twice in this keyword list; the dict
        # comprehension silently collapses the duplicate — confirm whether a
        # different keyword was intended.
        configure_arguments = {
            arg: arg
            for arg in 'capsules all_capsules domains hostgroups medias organizations '
                       'envs ptables resources select subnets templates users params '
                       'select'.split()
        }
        location.create('foo', 'parent', **configure_arguments)
        click_calls = [
            mock.call(locators['location.new']),
            mock.call(common_locators['submit']),
            mock.call(locators['location.proceed_to_edit']),
            mock.call(common_locators['submit'])
        ]
        self.assertEqual(4, location.click.call_count)
        location.click.assert_has_calls(click_calls, any_order=False)
        location.assign_value.assert_called_once_with(
            locators['location.name'], 'foo')
        # called only if parent is not None
        location.select.assert_called_once_with(
            locators['location.parent'], 'parent'
        )
        location._configure_location.assert_called_once_with(
            **configure_arguments)
# Copyright 2020 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Convert existing intent tests to behave tests."""
# NOTE: the module docstring was previously placed after the imports,
# where it is just an unused string expression; moved to the top.
from glob import glob
import json
from pathlib import Path
import sys

# Gherkin scenario template filled in once per converted intent test.
TEMPLATE = """
  Scenario: {scenario}
    Given an english speaking user
     When the user says "{utterance}"
     Then "{skill}" should reply with dialog from "{dialog_file}.dialog"
"""


def json_files(path):
    """Generator function returning paths of all json files in a folder."""
    for json_file in sorted(glob(str(Path(path, '*.json')))):
        yield Path(json_file)


def generate_feature(skill, skill_path):
    """Generate a feature file provided a skill name and a path to the skill.

    Args:
        skill: skill name used in the Feature header and scenarios.
        skill_path: path to the skill root; intent tests are read from
            ``<skill_path>/test/intent/*.json``.

    Returns:
        The feature-file content as a string ('' when no usable tests exist).
    """
    test_path = Path(skill_path, 'test', 'intent')
    case = []
    if test_path.exists() and test_path.is_dir():
        for json_file in json_files(test_path):
            with open(str(json_file)) as test_file:
                test = json.load(test_file)
            # BUGFIX: the original condition was
            # `if 'utterance' and 'expected_dialog' in test:` which only
            # checks 'expected_dialog' ('utterance' is a truthy literal), and
            # crashed with KeyError on tests lacking an 'utterance' key.
            if 'utterance' in test and 'expected_dialog' in test:
                utt = test['utterance']
                dialog = test['expected_dialog']
                # Simple handling of multiple accepted dialogfiles
                if isinstance(dialog, list):
                    dialog = dialog[0]
                case.append((json_file.name, utt, dialog))
    output = ''
    if case:
        output += 'Feature: {}\n'.format(skill)
        for c in case:
            output += TEMPLATE.format(skill=skill,
                                      scenario=c[0],
                                      utterance=c[1],
                                      dialog_file=c[2])
    return output


if __name__ == '__main__':
    print(generate_feature(*sys.argv[1:]))
from django import template
import datetime

register = template.Library()


# https://stackoverflow.com/a/8907269/2226755
def strfdelta(tdelta, fmt):
    """Format a timedelta via str.format with days/hours/minutes/seconds keys."""
    hours, remainder = divmod(tdelta.seconds, 3600)
    minutes, seconds = divmod(remainder, 60)
    parts = {
        "days": tdelta.days,
        "hours": hours,
        "minutes": minutes,
        "seconds": seconds,
    }
    return fmt.format(**parts)


# TODO add unit test
@register.filter("seconds_to_duration")
def seconds_to_duration(value):
    """
    Display a human-readable reading-time (or any other duration)
    from a duration in seconds.
    """
    # Non-positive durations render as an empty string.
    if value <= 0:
        return ""
    duration = datetime.timedelta(seconds=value)
    # Omit the hour component for durations shorter than one hour.
    fmt = (
        "{minutes}m{seconds}s"
        if duration < datetime.timedelta(hours=1)
        else "{hours}h{minutes}m{seconds}s"
    )
    return strfdelta(duration, fmt)
import olymap.skill


def test_learn_time():
    # (box, expected) pairs: get_learn_time should return the SK/tl value
    # only for skill boxes that carry one, and None otherwise.
    tests = (
        ({}, None),
        ({'SK': {'tl': ['14']}}, '14'),
        ({'SK': {'an': ['0']}}, None),
        ({'IT': {'tl': ['1']}}, None),
        ({'SK': {'an': ['1']}}, None),
    )
    for box, answer in tests:
        assert olymap.skill.get_learn_time(box) == answer


def test_get_required_skill():
    # (box, expected) pairs: get_required_skill should resolve the SK/rs
    # reference against `data` into an id/oid/name dict, and return None
    # when the key path is absent or not a skill box.
    tests = (
        ({}, None),
        ({'SK': {'rs': ['632']}}, {'id': '632', 'oid': '632', 'name': 'Determine inventory of character'}),
        ({'SK': {'rs': ['630']}}, {'id': '630', 'oid': '630', 'name': 'Stealth'}),
        ({'SK': {'re': ['632']}}, None),
        ({'SL': {'rs': ['632']}}, None),
    )
    # Minimal skill database the required-skill ids resolve against.
    data = {'630': {'firstline': ['630 skill 0'], 'na': ['Stealth'], 'SK': {'tl': ['28'], 'of': ['631', '632', '633', '634', '635'], 're': ['636', '637', '638', '639']}},
            '632': {'firstline': ['632 skill 0'], 'na': ['Determine inventory of character'], 'SK': {'tl': ['14'], 'rs': ['630']}}}
    for box, answer in tests:
        assert olymap.skill.get_required_skill(box, data) == answer
fro
m Model import * import MemoryDecay # Note we cannot import TwoConcepts here because that ends up modifying the grammar, ruining it fo
r example loaders
mport string import tempfile import shutil import threading import exceptions import errno from collections import defaultdict from xml.etree import ElementTree import nixops.statefile import nixops.backends import nixops.logger import nixops.parallel from nixops.nix_expr import RawValue, Function, Call, nixmerge, py2nix import re from datetime import datetime, timedelta import getpass import traceback import glob import fcntl import itertools import platform from nixops.util import ansi_success import inspect import time class NixEvalError(Exception): pass class UnknownBackend(Exception): pass debug = False class Deployment(object): """NixOps top-level deployment manager.""" default_description = "Unnamed NixOps network" name = nixops.util.attr_property("name", None) nix_exprs = nixops.util.attr_property("nixExprs", [], 'json') nix_path = nixops.util.attr_property("nixPath", [], 'json') args = nixops.util.attr_property("args", {}, 'json') description = nixops.util.attr_property("description", default_description) configs_path = nixops.util.attr_property("configsPath", None) rollback_enabled = nixops.util.attr_property("rollbackEnabled", False) datadog_notify = nixops.util.attr_property("datadogNotify", False, bool) datadog_event_info = nixops.util.attr_property("datadogEventInfo", "") datadog_tags = nixops.util.attr_property("datadogTags", [], 'json') # internal variable to mark if network attribute of network has been evaluated (separately) network_attr_eval = False def __init__(self, statefile, uuid, log_file=sys.stderr): self._statefile = statefile self._db = statefile._db self.uuid = uuid self._last_log_prefix = None self.extra_nix_path = [] self.extra_nix_flags = [] self.extra_nix_eval_flags = [] self.nixos_version_suffix = None self._tempdir = None self.logger = nixops.logger.Logger(log_file) self._lock_file_path = None self.expr_path = os.path.realpath(os.path.dirname(__file__) + "/../../../../share/nix/nixops") if not os.path.exists(self.expr_path): 
self.expr_path = os.path.realpath(os.path.dirname(__file__) + "/..
/../../../../share/nix/nixops")
if not os.path.exists(self.expr_path): self.expr_path = os.path.dirname(__file__) + "/../nix" self.resources = {} with self._db: c = self._db.cursor() c.execute("select id, name, type from Resources where deployment = ?", (self.uuid,)) for (id, name, type) in c.fetchall(): r = _create_state(self, type, name, id) self.resources[name] = r self.logger.update_log_prefixes() self.definitions = None @property def tempdir(self): if not self._tempdir: self._tempdir = nixops.util.SelfDeletingDir(tempfile.mkdtemp(prefix="nixops-tmp")) return self._tempdir @property def machines(self): return {n: r for n, r in self.resources.items() if is_machine(r)} @property def active(self): # FIXME: rename to "active_machines" return {n: r for n, r in self.resources.items() if is_machine(r) and not r.obsolete} @property def active_resources(self): return {n: r for n, r in self.resources.items() if not r.obsolete} def get_typed_resource(self, name, type): res = self.active_resources.get(name, None) if not res: raise Exception("resource ‘{0}’ does not exist".format(name)) if res.get_type() != type: raise Exception("resource ‘{0}’ is not of type ‘{1}’".format(name, type)) return res def get_machine(self, name): res = self.active_resources.get(name, None) if not res: raise Exception("machine ‘{0}’ does not exist".format(name)) if not is_machine(res): raise Exception("resource ‘{0}’ is not a machine".format(name)) return res def _set_attrs(self, attrs): """Update deployment attributes in the state file.""" with self._db: c = self._db.cursor() for n, v in attrs.iteritems(): if v == None: c.execute("delete from DeploymentAttrs where deployment = ? 
and name = ?", (self.uuid, n)) else: c.execute("insert or replace into DeploymentAttrs(deployment, name, value) values (?, ?, ?)", (self.uuid, n, v)) def _set_attr(self, name, value): """Update one deployment attribute in the state file.""" self._set_attrs({name: value}) def _del_attr(self, name): """Delete a deployment attribute from the state file.""" with self._db: self._db.execute("delete from DeploymentAttrs where deployment = ? and name = ?", (self.uuid, name)) def _get_attr(self, name, default=nixops.util.undefined): """Get a deployment attribute from the state file.""" with self._db: c = self._db.cursor() c.execute("select value from DeploymentAttrs where deployment = ? and name = ?", (self.uuid, name)) row = c.fetchone() if row != None: return row[0] return nixops.util.undefined def _create_resource(self, name, type): c = self._db.cursor() c.execute("select 1 from Resources where deployment = ? and name = ?", (self.uuid, name)) if len(c.fetchall()) != 0: raise Exception("resource already exists in database!") c.execute("insert into Resources(deployment, name, type) values (?, ?, ?)", (self.uuid, name, type)) id = c.lastrowid r = _create_state(self, type, name, id) self.resources[name] = r return r def export(self): with self._db: c = self._db.cursor() c.execute("select name, value from DeploymentAttrs where deployment = ?", (self.uuid,)) rows = c.fetchall() res = {row[0]: row[1] for row in rows} res['resources'] = {r.name: r.export() for r in self.resources.itervalues()} return res def import_(self, attrs): with self._db: for k, v in attrs.iteritems(): if k == 'resources': continue self._set_attr(k, v) for k, v in attrs['resources'].iteritems(): if 'type' not in v: raise Exception("imported resource lacks a type") r = self._create_resource(k, v['type']) r.import_(v) def clone(self): with self._db: new = self._statefile.create_deployment() self._db.execute("insert into DeploymentAttrs (deployment, name, value) " + "select ?, name, value from DeploymentAttrs 
where deployment = ?", (new.uuid, self.uuid)) new.configs_path = None return new def _get_deployment_lock(self): if self._lock_file_path is None: lock_dir = os.environ.get("HOME", "") + "/.nixops/locks" if not os.path.exists(lock_dir): os.makedirs(lock_dir, 0700) self._lock_file_path = lock_dir + "/" + self.uuid class DeploymentLock(object): def __init__(self, depl): self._lock_file_path = depl._lock_file_path self._logger = depl.logger self._lock_file = None def __enter__(self): self._lock_file = open(self._lock_file_path, "w") fcntl.fcntl(self._lock_file, fcntl.F_SETFD, fcntl.FD_CLOEXEC) try: fcntl.flock(self._lock_file, fcntl.LOCK_EX | fcntl.LOCK_NB) except IOError: self._logger.log( "waiting for exclusive deployment lock..." ) fcntl.flock(self._lock_file, fcntl.LOCK_EX) def __exit__(self, exception_type, exception_value, exception_traceback): self._lock_file.cl
meter_name, None) def lookups(self, request, model_admin): """ Must be overridden to return a list of tuples (value, verbose value) """ raise NotImplementedError( 'The SimpleListFilter.lookups() method must be overridden to ' 'return a list of tuples (value, verbose value)') def expected_parameters(self): return [self.parameter_name] def choices(self, cl): yield { 'selected': self.value() is None, 'query_string': cl.get_query_string({}, [self.parameter_name]), 'display': _('All'), } for lookup, title in self.lookup_choices: yield { 'selected': self.value() == force_text(lookup), 'query_string': cl.get_query_string({ self.parameter_name: lookup, }, []), 'display': title, } class FieldListFilter(ListFilter): _field_list_filters = [] _take_priority_index = 0 def __init__(self, field, request, params, model, model_admin, field_path): self.field = field self.field_path = field_path self.title = getattr(field, 'verbose_name', field_path) super(FieldListFilter, self).__init__( request, params, model, model_admin) for p in self.expected_parameters(): if p in params: value = params.pop(p) self.used_parameters[p] = prepare_lookup_value(p, value) def has_output(self): return True def queryset(self, request, queryset): try: return queryset.filter(**self.used_parameters) except ValidationError as e: raise IncorrectLookupParameters(e) @classmethod def register(cls, test, list_filter_class, take_priority=False): if take_priority: # This is to allow overriding the default filters for certain types # of fields with some custom filters. The first found in the list # is used in priority. 
cls._field_list_filters.insert( cls._take_priority_index, (test, list_filter_class)) cls._take_priority_index += 1 else: cls._field_list_filters.append((test, list_filter_class)) @classmethod def create(cls, field, request, params, model, model_admin, field_path): for test, list_filter_class in cls._field_list_filters: if not test(field): continue return list_filter_class(field, request, params, model, model_admin, field_path=field_path) class RelatedFieldListFilter(FieldListFilter): def __init__(self, field, request, params, model, model_admin, field_path): other_model = get_model_from_relation(field) if hasattr(field, 'rel'): rel_name = field.rel.get_related_field().name else: rel_name = other_model._meta.pk.name self.lookup_kwarg = '%s__%s__exact' % (field_path, rel_name) self.lookup_kwarg_isnull = '%s__isnull' % field_path self.lookup_val = request.GET.get(self.lookup_kwarg) self.lookup_val_isnull = request.GET.get(self.lookup_kwarg_isnull) self.lookup_choices = self.field_choices(field, request, model_admin) super(RelatedFieldListFilter, self).__init__( field, request, params, model, model_admin, field_path) if hasattr(field, 'verbose_name'): self.lookup_title = field.verbose_name else: self.lookup_title = other_model._meta.verbose_name self.title = self.lookup_title def has_output(self): if (isinstance(self.field, ForeignObjectRel) and self.field.field.null or hasattr(self.field, 'rel') and
self.field.null): extra = 1 else:
extra = 0 return len(self.lookup_choices) + extra > 1 def expected_parameters(self): return [self.lookup_kwarg, self.lookup_kwarg_isnull] def field_choices(self, field, request, model_admin): return field.get_choices(include_blank=False) def choices(self, cl): from django.contrib.admin.views.main import EMPTY_CHANGELIST_VALUE yield { 'selected': self.lookup_val is None and not self.lookup_val_isnull, 'query_string': cl.get_query_string({}, [self.lookup_kwarg, self.lookup_kwarg_isnull]), 'display': _('All'), } for pk_val, val in self.lookup_choices: yield { 'selected': self.lookup_val == smart_text(pk_val), 'query_string': cl.get_query_string({ self.lookup_kwarg: pk_val, }, [self.lookup_kwarg_isnull]), 'display': val, } if (isinstance(self.field, ForeignObjectRel) and (self.field.field.null or isinstance(self.field.field, ManyToManyField)) or hasattr(self.field, 'rel') and (self.field.null or isinstance(self.field, ManyToManyField))): yield { 'selected': bool(self.lookup_val_isnull), 'query_string': cl.get_query_string({ self.lookup_kwarg_isnull: 'True', }, [self.lookup_kwarg]), 'display': EMPTY_CHANGELIST_VALUE, } FieldListFilter.register(lambda f: ( bool(f.rel) if hasattr(f, 'rel') else isinstance(f, ForeignObjectRel)), RelatedFieldListFilter) class BooleanFieldListFilter(FieldListFilter): def __init__(self, field, request, params, model, model_admin, field_path): self.lookup_kwarg = '%s__exact' % field_path self.lookup_kwarg2 = '%s__isnull' % field_path self.lookup_val = request.GET.get(self.lookup_kwarg, None) self.lookup_val2 = request.GET.get(self.lookup_kwarg2, None) super(BooleanFieldListFilter, self).__init__(field, request, params, model, model_admin, field_path) def expected_parameters(self): return [self.lookup_kwarg, self.lookup_kwarg2] def choices(self, cl): for lookup, title in ( (None, _('All')), ('1', _('Yes')), ('0', _('No'))): yield { 'selected': self.lookup_val == lookup and not self.lookup_val2, 'query_string': cl.get_query_string({ 
self.lookup_kwarg: lookup, }, [self.lookup_kwarg2]), 'display': title, } if isinstance(self.field, models.NullBooleanField): yield { 'selected': self.lookup_val2 == 'True', 'query_string': cl.get_query_string({ self.lookup_kwarg2: 'True', }, [self.lookup_kwarg]), 'display': _('Unknown'), } FieldListFilter.register(lambda f: isinstance(f, (models.BooleanField, models.NullBooleanField)), BooleanFieldListFilter) class ChoicesFieldListFilter(FieldListFilter): def __init__(self, field, request, params, model, model_admin, field_path): self.lookup_kwarg = '%s__exact' % field_path self.lookup_val = request.GET.get(self.lookup_kwarg) super(ChoicesFieldListFilter, self).__init__( field, request, params, model, model_admin, field_path) def expected_parameters(self): return [self.lookup_kwarg] def choices(self, cl): yield { 'selected': self.lookup_val is None, 'query_string': cl.get_query_string({}, [self.lookup_kwarg]), 'display': _('All') } for lookup, title in self.field.flatchoices: yield { 'selected': smart_text(lookup) == self.lookup_val, 'query_string': cl.get_query_string({ self.lookup_kwarg: lookup}), 'display': title, } FieldListFilter.register(lambda f: bool(f.choices), ChoicesFieldListFilter) class DateFieldListFilter(FieldListFilter): def __init__(self, field, request, params, model, model_admin, field_path):
from django.contrib import admin

from rawParser.models import flightSearch

# Register your models here.
# Expose the flightSearch model in the Django admin using the
# default ModelAdmin options.
admin.site.register(flightSearch)
# -*- coding: utf-8 -*- # # Copyright (c) 2016-2017 Ircam # Copyright (c) 2016-2017 Guillaume Pellerin # Copyright (c) 2016-2017 Emilie Zawadzki # This file is part of mezzanine-organization. # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is d
istributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTAB
ILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>.
# -*- coding: utf-8 -*-
#
# Sphinx configuration for the powerschool_apps documentation,
# originally generated by sphinx-quickstart.
#
# This file is execfile()d with the current directory set to its
# containing dir. Commented-out values show the Sphinx defaults.
from __future__ import unicode_literals

import os
import sys

# -- General configuration -----------------------------------------------

# No Sphinx extensions are enabled for this project.
extensions = []

# Paths (relative to this directory) searched for templates.
templates_path = ['_templates']

# Source file suffix and the master toctree document.
source_suffix = '.rst'
master_doc = 'index'

# Project identity.
project = 'powerschool_apps'
copyright = """2017, Iron County School District"""

# Short X.Y version and full release string (substituted as |version|
# and |release| throughout the built documents).
version = '0.1'
release = '0.1'

# Patterns ignored when looking for source files.
exclude_patterns = ['_build']

# Pygments syntax-highlighting style.
pygments_style = 'sphinx'

# -- Options for HTML output ---------------------------------------------

# Built-in theme used for HTML and HTML Help pages.
html_theme = 'default'

# Custom static files (copied after the builtin static files).
html_static_path = ['_static']

# Output file base name for the HTML help builder.
htmlhelp_basename = 'powerschool_appsdoc'

# -- Options for LaTeX output --------------------------------------------

latex_elements = {
    # All LaTeX knobs ('papersize', 'pointsize', 'preamble', ...) are left
    # at their defaults.
}

# (source start file, target name, title, author, documentclass).
latex_documents = [
    ('index',
     'powerschool_apps.tex',
     'powerschool_apps Documentation',
     """Iron County School District""",
     'manual'),
]

# -- Options for manual page output --------------------------------------

# (source start file, name, description, authors, manual section).
man_pages = [
    ('index',
     'powerschool_apps',
     'powerschool_apps Documentation',
     ["""Iron County School District"""],
     1)
]

# -- Options for Texinfo output ------------------------------------------

# (source start file, target name, title, author, dir menu entry,
#  description, category).
texinfo_documents = [
    ('index',
     'powerschool_apps',
     'powerschool_apps Documentation',
     """Iron County School District""",
     'powerschool_apps',
     """PowerSchool customizations written in Django""",
     'Miscellaneous'),
]
from parabem.pan2d import doublet_2_1
import parabem
import numpy as np

# Flat panel on the x-axis spanning (-1, 0) to (1, 0); note the endpoints
# are passed in reverse order (v2 first), which fixes the panel orientation.
v1 = parabem.PanelVector2(-1, 0)
v2 = parabem.PanelVector2(1, 0)
panel = parabem.Panel2([v2, v1])
# Evaluate doublet_2_1 at 20 points along y=0 for x in [-2, 2].
# NOTE(review): presumably the doublet influence of the panel at each
# point, with the boolean selecting a variant — confirm against parabem docs.
vals = ([doublet_2_1(parabem.Vector2(x, 0), panel, True) for x in np.linspace(-2, 2, 20)])
print(vals)
__author__ = 'rls'


class Robot:
    """A named robot; the class keeps a census of all living robots."""

    # Class-level counter shared by every instance.
    population = 0

    def __init__(self, name):
        """Create a robot called *name* and add it to the census."""
        self.name = name
        print('(Initializing {})'.format(self.name))
        # Registering on the class, not the instance, so all robots share it.
        Robot.population += 1

    def die(self):
        """Remove this robot from the census and report the remainder."""
        print("{} is being destroyed!".format(self.name))
        Robot.population -= 1
        if Robot.population:
            print("There are still {:d} robots working.".format(Robot.population))
        else:
            print("{} was the last one.".format(self.name))

    def say_hi(self):
        """Greeting by the robot.

        Long doc statement."""
        print("Greetings, my masters have called me {}".format(self.name))

    @classmethod
    def how_many(cls):
        """Print the current robot population."""
        print("We have {:d} robots.".format(cls.population))


droid1 = Robot("R2-D2")
droid1.say_hi()
Robot.how_many()
__version__ = 0.2
                            None, bind=('_ui_mode',))
    '''Tries to ascertain the kind of device the app is running on.
    Can be one of `tablet` or `phone`.

    :data:`ui_mode` is a read only `AliasProperty`. Defaults to 'phone'.
    '''

    def __init__(self, **kwargs):
        """Set up app state, network parameters and Kivy update triggers.

        Expected ``kwargs``: ``config``, ``network``, ``plugins``,
        ``gui_object`` (whose ``daemon`` is used).
        """
        # initialize variables
        self._clipboard = Clipboard
        self.info_bubble = None
        self.nfcscanner = None
        self.tabs = None
        self.is_exit = False
        self.wallet = None
        self.pause_time = 0
        self.asyncio_loop = asyncio.get_event_loop()

        App.__init__(self)#, **kwargs)

        # NOTE(review): 'title' is assigned but never used here — confirm intent.
        title = _('Electrum App')
        self.electrum_config = config = kwargs.get('config', None)
        self.language = config.get('language', 'en')
        self.network = network = kwargs.get('network', None)  # type: Network
        if self.network:
            # Snapshot current network state for the UI.
            self.num_blocks = self.network.get_local_height()
            self.num_nodes = len(self.network.get_interfaces())
            net_params = self.network.get_parameters()
            self.server_host = net_params.host
            self.server_port = net_params.port
            self.auto_connect = net_params.auto_connect
            self.oneserver = net_params.oneserver
            self.proxy_config = net_params.proxy if net_params.proxy else {}
            self.update_proxy_str(self.proxy_config)

        self.plugins = kwargs.get('plugins', [])
        self.gui_object = kwargs.get('gui_object', None)
        self.daemon = self.gui_object.daemon
        self.fx = self.daemon.fx

        # User preferences read from the config.
        self.use_rbf = config.get('use_rbf', True)
        self.use_change = config.get('use_change', True)
        self.use_unconfirmed = not config.get('confirmed_only', False)

        # create triggers so as to minimize updating a max of 2 times a sec
        self._trigger_update_wallet = Clock.create_trigger(self.update_wallet, .5)
        self._trigger_update_status = Clock.create_trigger(self.update_status, .5)
        self._trigger_update_history = Clock.create_trigger(self.update_history, .5)
        self._trigger_update_interfaces = Clock.create_trigger(self.update_interfaces, .5)

        self._periodic_update_status_during_sync = Clock.schedule_interval(self.update_wallet_synchronizing_progress, .5)

        # cached dialogs
        self._settings_dialog = None
        self._password_dialog = None
        self.fee_status = self.electrum_config.get_fee_status()

    def on_pr(self, pr):
        """Handle an incoming payment request; verify, store and show it."""
        if not self.wallet:
            self.show_error(_('No wallet loaded.'))
            return
        if pr.verify(self.wallet.contacts):
            key = self.wallet.invoices.add(pr)
            if self.invoices_screen:
                self.invoices_screen.update()
            status = self.wallet.invoices.get_status(key)
            if status == PR_PAID:
                self.show_error("invoice already paid")
                self.send_screen.do_clear()
            else:
                if pr.has_expired():
                    self.show_error(_('Payment request has expired'))
                else:
                    # Fresh, unpaid request: take the user to the send tab.
                    self.switch_to('send')
                    self.send_screen.set_request(pr)
        else:
            self.show_error("invoice error:" + pr.error)
            self.send_screen.do_clear()

    def on_qr(self, data):
        """Dispatch scanned QR contents: address, payment URI or raw tx."""
        from electrum.bitcoin import base_decode, is_address
        data = data.strip()
        if is_address(data):
            self.set_URI(data)
            return
        if data.startswith('fujicoin:'):
            self.set_URI(data)
            return
        # try to decode transaction
        from electrum.transaction import Transaction
        from electrum.util import bh2u
        try:
            # QR payloads use base43 encoding for raw transactions.
            text = bh2u(base_decode(data, None, base=43))
            tx = Transaction(text)
            tx.deserialize()
        except:
            tx = None
        if tx:
            self.tx_dialog(tx)
            return
        # show error
        self.show_error("Unable to decode QR data")

    def update_tab(self, name):
        """Refresh the screen named ``<name>_screen`` if it exists."""
        s = getattr(self, name + '_screen', None)
        if s:
            s.update()

    @profiler
    def update_tabs(self):
        """Refresh every known tab screen."""
        for tab in ['invoices', 'send', 'history', 'receive', 'address']:
            self.update_tab(tab)

    def switch_to(self, name):
        """Switch the tab panel to the named tab, lazily loading its screen."""
        s = getattr(self, name + '_screen', None)
        if s is None:
            # Screen not built yet: fetch it from the kv ids and load it.
            s = self.tabs.ids[name + '_screen']
            s.load_screen()
        panel = self.tabs.ids.panel
        tab = self.tabs.ids[name + '_tab']
        panel.switch_to(tab)

    def show_request(self, addr):
        """Open the receive tab focused on ``addr``."""
        self.switch_to('receive')
        self.receive_screen.screen.address = addr

    def show_pr_details(self, req, status, is_invoice):
        """Show the invoice popup populated from a payment-request dict."""
        from electrum.util import format_time
        requestor = req.get('requestor')
        exp = req.get('exp')
        memo = req.get('memo')
        amount = req.get('amount')
        fund = req.get('fund')
        popup = Builder.load_file('electrum/gui/kivy/uix/ui_screens/invoice.kv')
        popup.is_invoice = is_invoice
        popup.amount = amount
        popup.requestor = requestor if is_invoice else req.get('address')
        popup.exp = format_time(exp) if exp else ''
        popup.description = memo if memo else ''
        popup.signature = req.get('signature', '')
        popup.status = status
        popup.fund = fund if fund else 0
        txid = req.get('txid')
        popup.tx_hash = txid or ''
        # Outputs are filled in lazily when the popup opens.
        popup.on_open = lambda: popup.ids.output_list.update(req.get('outputs', []))
        popup.export = self.export_private_keys
        popup.open()

    def show_addr_details(self, req, status):
        """Show the invoice popup for a plain address request (no invoice)."""
        from electrum.util import format_time
        fund = req.get('fund')
        isaddr = 'y'
        popup = Builder.load_file('electrum/gui/kivy/uix/ui_screens/invoice.kv')
        popup.isaddr = isaddr
        popup.is_invoice = False
        popup.status = status
        popup.requestor = req.get('address')
        popup.fund = fund if fund else 0
        popup.export = self.export_private_keys
        popup.open()

    def qr_dialog(self, title, data, show_text=False, text_for_clipboard=None):
        """Display ``data`` as a QR code; fall back to the clipboard on failure."""
        from .uix.dialogs.qr_dialog import QRDialog

        def on_qr_failure():
            popup.dismiss()
            msg = _('Failed to display QR code.')
            if text_for_clipboard:
                msg += '\n' + _('Text copied to clipboard.')
                self._clipboard.copy(text_for_clipboard)
            Clock.schedule_once(lambda dt: self.show_info(msg))

        popup = QRDialog(title, data, show_text, failure_cb=on_qr_failure,
                         text_for_clipboard=text_for_clipboard)
        popup.open()

    def scan_qr(self, on_complete):
        """Launch the Android QR scanner; call ``on_complete`` with the result.

        No-op on non-Android platforms.
        """
        if platform != 'android':
            return
        from jnius import autoclass, cast
        from android import activity
        PythonActivity = autoclass('org.kivy.android.PythonActivity')
        SimpleScannerActivity = autoclass("org.electrum.qr.SimpleScannerActivity")
        Intent = autoclass('android.content.Intent')
        intent = Intent(PythonActivity.mActivity, SimpleScannerActivity)

        def on_qr_result(requestCode, resultCode, intent):
            try:
                if resultCode == -1:  # RESULT_OK:
                    #  this doesn't work due to some bug in jnius:
                    # contents = intent.getStringExtra("text")
                    String = autoclass("java.lang.String")
                    contents = intent.getStringExtra(String("text"))
                    on_complete(contents)
            except Exception as e:  # exc would otherwise get lost
                send_exception_to_crash_reporter(e)
            finally:
                # Unbind regardless of outcome so the handler fires only once.
                activity.unbind(on_activity_result=on_qr_result)
        activity.bind(on_activity_result=on_qr_result)
        PythonActivity.mActivity.startActivityForResult(intent, 0)

    def do_share(self, data, title):
        """Share ``data`` via the Android share intent (Android only)."""
        if platform != 'android':
            return
        from jnius import autoclass, cast
        JS = autoclass('java.lang.String')
        Intent = autoclass('android.content.In
""" Order module has been split for its complexity. Proposed clean hierarchy for GASSupplierOrder that can be used in many contexts such as: DES: ChooseSupplier ChooseGAS ChooseReferrer GAS: ChooseSupplier OneGAS ChooseReferrer Supplier: OneSupplier ChooseGAS ChooseReferrer Solidal Pact: OneSupplier OneGAS ChooseReferrer * BaseOrderForm: base for add and edit | |---* AddOrderForm: encapsulate Add logic. | Just this class is enough if Resource API encapsulate | logic behind specific resource. Otherwise we need to write | subclasses XAddOrderForm where X is one of DES, GAS, Supplier, Pact. | | It manages: | * common attributes | * setting of withdrawal and deliveries | ----* EditOrderForm * PlannedAddOrderForm: mix-in class to add planning facilities #TODO LEFT OUT NOW InterGASAddOrderForm: it requires some considerations and #TODO LEFT OUT NOW so probably it should be managed as a separated module. #TODO LEFT OUT NOW P.e: deliveries and withdrawals MUST be always specified. #TODO LEFT OUT NOW It also would need multiple delivery and withdrawal places, #TODO LEFT OUT NOW but th
is will be a FUTURE module update Factory function `form_class_factory_for_request` is there for: * composition of final classes (XAddOrderForm, PlannedAddOrderForm, InterGASAddOrderForm) * follows GAS configuration options and prepare delivery and withdrawal fields Where can you find above classes: * base.BaseOrderForm * base.AddOrderForm * base.EditOrderForm * X.XAddOrd
erForm (where X can be des,gas,supplier,pact) * __init__.form_class_factory_for_request * extra.PlannedAddOrderForm #TODO LEFT OUT NOW * intergas.InterGASAddOrderForm There are also some other classes that support order interactions: * gmo.SingleGASMemberOrderForm * gmo.BasketGASMemberOrderForm * gsop.GASSupplierOrderProductForm """ from django import forms from django.utils.translation import ugettext, ugettext_lazy as _ from gf.base.models import Place, Person from lib.widgets import SplitDateTimeFormatAwareWidget from gf.gas.forms.order.base import AddOrderForm, EditOrderForm from gf.gas.forms.order.plan import AddPlannedOrderForm from gf.gas.forms.order.intergas import AddInterGASOrderForm, AddInterGASPlannedOrderForm from gf.gas.models import GASSupplierOrder import copy import logging log = logging.getLogger(__name__) def form_class_factory_for_request(request, base): """Return appropriate form class basing on GAS configuration and other request parameters if needed""" #log.debug("OrderForm--> form_class_factory_for_request") fields = copy.deepcopy(base.Meta.fields) gf_fieldsets = copy.deepcopy(base.Meta.gf_fieldsets) attrs = {} gas = request.resource.gas if gas: if gas.config.use_withdrawal_place: gf_fieldsets[0][1]['fields'].append('withdrawal_referrer_person') attrs.update({ 'withdrawal_referrer' : forms.ModelChoiceField( queryset=Person.objects.none(), required=False ), }) if gas.config.can_change_delivery_place_on_each_order: gf_fieldsets[0][1]['fields'].append(('delivery_city', 'delivery_addr_or_place')) attrs.update({ 'delivery_city' : forms.CharField(required=True, label=_('Delivery city'), initial=gas.city ), 'delivery_addr_or_place': forms.CharField( required=True, label=_('Delivery address or place'), initial=gas.headquarter ), }) if gas.config.use_withdrawal_place: if gas.config.can_change_withdrawal_place_on_each_order: gf_fieldsets[0][1]['fields'].append(( 'withdrawal_datetime', 'withdrawal_city', 'withdrawal_addr_or_place') ) attrs.update({ 
'withdrawal_datetime' : forms.SplitDateTimeField( required=False, label=_('Withdrawal on/at'), widget=SplitDateTimeFormatAwareWidget ), 'withdrawal_city' : forms.CharField( required=True, label=_('Withdrawal city'), initial=gas.city ), 'withdrawal_addr_or_place': forms.CharField(required=True, label=_('Withdrawal address or place'), initial=gas.headquarter ), }) attrs.update(Meta=type('Meta', (), { 'model' : GASSupplierOrder, 'fields' : fields, 'gf_fieldsets' : gf_fieldsets })) return type('Custom%s' % base.__name__, (base,), attrs)
#!/usr/bin/env python
# Copyright (C) 2008,2011 Lanedo GmbH
#
# Author: Tim Janik
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

# NOTE: this is Python 2 code (print statements, urllib, ConfigParser).
import sys, os, re, urllib, csv

# Package metadata; placeholders are rewritten when the script is installed.
pkginstall_configvars = {
  'PACKAGE' : 'dummy', 'PACKAGE_NAME' : 'dummy', 'VERSION' : '0.0', 'REVISION' : 'uninstalled',
  #@PKGINSTALL_CONFIGVARS_IN24LINES@ # configvars are substituted upon script installation
}

# TODO:
# - support mixing in comments.txt which has "bug# person: task"

# Mapping of tracker aliases to Bugzilla bug-list query URL prefixes.
bugurls   = (
  ('gb',          'http://bugzilla.gnome.org/buglist.cgi?bug_id='),
  ('gnome',       'http://bugzilla.gnome.org/buglist.cgi?bug_id='),
  ('fd',          'https://bugs.freedesktop.org/buglist.cgi?bug_id='),
  ('freedesktop', 'https://bugs.freedesktop.org/buglist.cgi?bug_id='),
  ('mb',          'https://bugs.maemo.org/buglist.cgi?bug_id='),
  ('maemo',       'https://bugs.maemo.org/buglist.cgi?bug_id='),
  ('nb',          'https://projects.maemo.org/bugzilla/buglist.cgi?bug_id='),
  ('nokia',       'https://projects.maemo.org/bugzilla/buglist.cgi?bug_id='),
  ('gcc',         'http://gcc.gnu.org/bugzilla/buglist.cgi?bug_id='),
  ('libc',        'http://sources.redhat.com/bugzilla/buglist.cgi?bug_id='),
  ('moz',         'https://bugzilla.mozilla.org/buglist.cgi?bug_id='),
  ('mozilla',     'https://bugzilla.mozilla.org/buglist.cgi?bug_id='),
  ('xm',          'http://bugzilla.xamarin.com/buglist.cgi?id='),
  ('xamarin',     'http://bugzilla.xamarin.com/buglist.cgi?id='),
)

# URL authentication handling
def auth_urls():
  """Read authenticated URLs from ~/.urlrc ([authentication-urls] 'urls').

  Returns a list of URLs that embed user:password credentials.
  """
  import ConfigParser, os, re
  cp = ConfigParser.SafeConfigParser()
  cp.add_section ('authentication-urls')
  cp.set ('authentication-urls', 'urls', '')
  cp.read (os.path.expanduser ('~/.urlrc'))
  urlstr = cp.get ('authentication-urls', 'urls')       # space separated url list
  urls = re.split ("\s*", urlstr.strip())               # list urls
  urls = [u for u in urls if u]                         # strip empty urls
  # Deliberate trick: rebind the module-level name so the config file is
  # parsed only once; later calls return the cached list.
  global auth_urls; auth_urls = lambda : urls           # cache result for the future
  return urls

def add_auth (url):
  """Return *url* with credentials spliced in, if a configured
  authentication URL matches its credential-stripped prefix."""
  for ai in auth_urls():
    # Strip the 'user:password@' part to get the comparable prefix.
    prefix = re.sub ('//[^:/@]*:[^:/@]*@', '//', ai)
    if url.startswith (prefix):
      pl = len (prefix)
      return ai + url[pl:]
  return url
# carry out online bug queries
def bug_summaries (buglisturl):
  """Fetch the Bugzilla bug list as CSV and return [(bug_number, description)].

  Exits the process (codes 11/12) if the CSV header lacks the needed columns.
  """
  if not buglisturl:
    return []
  # Bugzilla query to use
  query = buglisturl + '&ctype=csv' # buglisturl.replace (',', '%2c')
  query = add_auth (query)
  f = urllib.urlopen (query)
  csvdata = f.read()
  f.close()
  # read CSV lines
  reader = csv.reader (csvdata.splitlines (1))
  # parse head to interpret columns
  col_bug_id = -1
  col_description = -1
  header = reader.next()
  i = 0
  for col in header:
    col = col.strip()
    if col == 'bug_id':
      col_bug_id = i
    # Prefer 'short_short_desc'; fall back to 'short_desc'.
    if col == 'short_short_desc':
      col_description = i
    elif col_description < 0 and col == 'short_desc':
      col_description = i
    i = i + 1
  if col_bug_id < 0:
    print >>sys.stderr, 'Failed to identify bug_id from CSV data'
    sys.exit (11)
  if col_description < 0:
    print >>sys.stderr, 'Failed to identify description columns from CSV data'
    sys.exit (12)
  # parse bug list
  result = []
  summary = ''
  for row in reader:
    bug_number = row[col_bug_id]
    description = row[col_description]
    result += [ (bug_number, description) ]
  return result

# parse bug numbers and list bugs
def read_handle_bugs (config, url):
  """Read bug numbers from stdin, then print the query URL and/or the
  bug summaries according to *config* ('sort', 'show-query', 'show-list')."""
  lines = sys.stdin.read()
  # print >>sys.stderr, 'Using bugzilla URL: %s' % (bz, url)
  for line in [ lines ]:
    # find all bug numbers
    bugs = re.findall (r'\b[0-9]+\b', line)
    # int-convert, dedup and sort bug numbers
    ibugs = []
    if bugs:
      bught = {}
      for b in bugs:
        b = int (b)
        if not b or bught.has_key (b):
          continue
        bught[b] = True
        ibugs += [ b ]
      del bugs
      if config.get ('sort', False):
        ibugs.sort()
    # construct full query URL
    fullurl = url + ','.join ([str (b) for b in ibugs])
    # print fullurl
    if len (ibugs) and config.get ('show-query', False):
      print fullurl
    # print bug summaries
    if len (ibugs) and config.get ('show-list', False):
      bught = {}
      for bug in bug_summaries (fullurl):
        bught[int (bug[0])] = bug[1]
      # bug summaries can have random order
      for bugid in ibugs:
        # print bugs in user provided order
        iid = int (bugid)
        if bught.has_key (iid):
          desc = bught[iid]
          # Truncate long descriptions to keep one bug per line.
          if len (desc) >= 70:
            desc = desc[:67].rstrip() + '...'
          print "% 7u - %s" % (iid, desc)
        else:
          print "% 7u (NOBUG)" % iid

def help (version = False, verbose = False):
  """Print usage information; with version=True print only the version."""
  print "buglist %s (%s, %s)" % (pkginstall_configvars['VERSION'],
                                 pkginstall_configvars['PACKAGE_NAME'],
                                 pkginstall_configvars['REVISION'])
  print "Redistributable under GNU GPLv3 or later: http://gnu.org/licenses/gpl.html"
  if version: # version *only*
    return
  print "Usage: %s [options] <BUG-TRACKER> " % os.path.basename (sys.argv[0])
  print "List or download bugs from a bug tracker. Bug numbers are read from stdin."
  if not verbose:
    print "Use the --help option for verbose usage information."
    return
  # 12345678911234567892123456789312345678941234567895123456789612345678971234567898
  print "Options:"
  print " -h, --help Print verbose help message."
  print " -v, --version Print version information."
  print " -U Keep bug list unsorted."
  print " --bug-tracker-list List supported bug trackers."
  print "Authentication:"
  print " An INI-style config file is used to associate bugzilla URLs with account"
  print " authentication for secured installations. The file should be unreadable"
  print " by others to keep passwords secret, e.g. with: chmod 0600 ~/.urlrc"
  print " A sample ~/.urlrc might look like this:"
  print "\t# INI-style config file for URLs"
  print "\t[authentication-urls]"
  print "\turls =\thttps://USERNAME:PASSWORD@projects.maemo.org/bugzilla"
  print "\t\thttp://BLOGGER:PASSWORD@blogs.gnome.org/BLOGGER/xmlrpc.php"

def main ():
  """Parse command-line options and dispatch to read_handle_bugs()."""
  import getopt
  # default configuration
  config = {
    'sort'       : True,
    'show-query' : True,
    'show-list'  : True,
  }
  # parse options
  try:
    options, args = getopt.gnu_getopt (sys.argv[1:], 'vhU',
                                       [ 'help', 'version', 'bug-tracker-list' ])
  except getopt.GetoptError, err:
    print >>sys.stderr, "%s: %s" % (os.path.basename (sys.argv[0]), str (err))
    help()
    sys.exit (126)
  for arg, val in options:
    if arg == '-h' or arg == '--help':
      help (verbose=True); sys.exit (0)
    if arg == '-v' or arg == '--version':
      help (version=True); sys.exit (0)
    if arg == '-U':
      config['sort'] = False
    if arg == '--bug-tracker-list':
      print "Bug Tracker:"
      for kv in bugurls:
        print " %-20s %s" % kv
      sys.exit (0)
  if len (args) < 1:
    print >>sys.stderr, "%s: Missing bug tracker argument" % os.path.basename (sys.argv[0])
    help()
    sys.exit (126)
  trackerdict = dict (bugurls)
  if not trackerdict.has_key (args[0]):
    print >>sys.stderr, "%s: Unknown bug tracker: %s" % (os.path.basename (sys.argv[0]), args[0])
    sys.exit (10)
  # handle bugs
  read_handle_bugs (config, trackerdict[args[0]])

if __name__ == '__main__':
  main()
# *-* coding:utf-8 *-*
# Python 2 tutorial script demonstrating iteration helpers, comprehensions,
# functional tools, partials, generators, lambdas and decorators.
from functools import partial # improves readability

# range()
print range(0,9,2) # produces an increasing list
for i in xrange(0,9,2): # xrange: only usable inside a for loop
    print i

albums = ("Poe","Gaudi","Freud","Poe2")
years = (1976,1987,1990,2003)
for album in sorted(albums):
    print album
for album in reversed(albums):
    print album
for i,album in enumerate(albums):
    print i,album
for album,yr in zip(albums,years):
    print album,yr

# list expressions
# 8.12 list comprehensions; a comprehension is more efficient than map+lambda
def fuc(a):
    return a**2

x = range(1,10,1)
print x
print map(fuc,x) # map applies one operation to every list element
# lambda creates a one-line function -- used once, not a shared/public function
print map(lambda x:x**2,range(6))
print [x**2 for x in range(6) if x>3]
print filter(lambda x:x%2,range(10))
print [x for x in range(10) if x%2]

# 11.7.2 functional programming
print range(6)
print reduce(lambda x,y:x+y,range(6)) # running sum (accumulation)

# partial functions: simplify code and speed it up
int2 = partial(int,base=2)
print int2('1000')

# closures
# generator expression
g = (x for x in range(10))
print g.next()
print "------"
for n in g:
    print n

# anonymous function: no name, assign the function to a variable
f = lambda x:x*x
print f(2)

# decorators
def log():
    print 'log'

def now():
    print 'time is:','2017-09-14'

# dictionaries
t=None):
        # Return the PayPal form action URL for this acquirer's environment.
        acquirer = self.browse(cr, uid, id, context=context)
        return self._get_paypal_urls(cr, uid, acquirer.environment, context=context)['paypal_form_url']

    def _paypal_s2s_get_access_token(self, cr, uid, ids, context=None):
        """Fetch an OAuth access token per acquirer id; returns {id: token|False}.

        Note: see # see http://stackoverflow.com/questions/2407126/python-urllib2-basic-auth-problem
        for explanation why we use Authorization header instead of urllib2
        password manager
        """
        res = dict.fromkeys(ids, False)
        parameters = werkzeug.url_encode({'grant_type': 'client_credentials'})

        for acquirer in self.browse(cr, uid, ids, context=context):
            tx_url = self._get_paypal_urls(cr, uid, acquirer.environment)['paypal_rest_url']
            request = urllib2.Request(tx_url, parameters)

            # add other headers (https://developer.paypal.com/webapps/developer/docs/integration/direct/make-your-first-call/)
            request.add_header('Accept', 'application/json')
            request.add_header('Accept-Language', tools.config.defaultLang)

            # add authorization header
            base64string = base64.encodestring('%s:%s' % (
                acquirer.paypal_api_username,
                acquirer.paypal_api_password)
            ).replace('\n', '')
            request.add_header("Authorization", "Basic %s" % base64string)

            request = urllib2.urlopen(request)
            result = request.read()
            res[acquirer.id] = json.loads(result).get('access_token')
            request.close()
        return res


class TxPaypal(osv.Model):
    # PayPal-specific extension of payment.transaction.
    _inherit = 'payment.transaction'

    _columns = {
        'paypal_txn_id': fields.char('Transaction ID'),
        'paypal_txn_type': fields.char('Transaction type'),
    }

    # --------------------------------------------------
    # FORM RELATED METHODS
    # --------------------------------------------------

    def _paypal_form_get_tx_from_data(self, cr, uid, data, context=None):
        """Locate the unique transaction matching an IPN payload, or raise."""
        reference, txn_id = data.get('item_number'), data.get('txn_id')
        if not reference or not txn_id:
            error_msg = 'Paypal: received data with missing reference (%s) or txn_id (%s)' % (reference, txn_id)
            _logger.error(error_msg)
            raise ValidationError(error_msg)

        # find tx -> @TDENOTE use txn_id ?
        tx_ids = self.pool['payment.transaction'].search(cr, uid, [('reference', '=', reference)], context=context)
        if not tx_ids or len(tx_ids) > 1:
            error_msg = 'Paypal: received data for reference %s' % (reference)
            if not tx_ids:
                error_msg += '; no order found'
            else:
                error_msg += '; multiple order found'
            _logger.error(error_msg)
            raise ValidationError(error_msg)
        return self.browse(cr, uid, tx_ids[0], context=context)

    def _paypal_form_get_invalid_parameters(self, cr, uid, tx, data, context=None):
        """Compare IPN data against the stored tx; return (name, got, expected) triples."""
        invalid_parameters = []
        _logger.info('Received a notification from Paypal with IPN version %s', data.get('notify_version'))
        if data.get('test_ipn'):
            # NOTE(review): trailing comma below makes this statement a tuple
            # expression — harmless but probably unintended; confirm.
            _logger.warning(
                'Received a notification from Paypal using sandbox'
            ),

        # TODO: txn_id: shoudl be false at draft, set afterwards, and verified with txn details
        if tx.acquirer_reference and data.get('txn_id') != tx.acquirer_reference:
            invalid_parameters.append(('txn_id', data.get('txn_id'), tx.acquirer_reference))
        # check what is buyed
        if float_compare(float(data.get('mc_gross', '0.0')), (tx.amount + tx.fees), 2) != 0:
            invalid_parameters.append(('mc_gross', data.get('mc_gross'), '%.2f' % tx.amount))  # mc_gross is amount + fees
        if data.get('mc_currency') != tx.currency_id.name:
            invalid_parameters.append(('mc_currency', data.get('mc_currency'), tx.currency_id.name))
        if 'handling_amount' in data and float_compare(float(data.get('handling_amount')), tx.fees, 2) != 0:
            invalid_parameters.append(('handling_amount', data.get('handling_amount'), tx.fees))
        # check buyer
        if tx.partner_reference and data.get('payer_id') != tx.partner_reference:
            invalid_parameters.append(('payer_id', data.get('payer_id'), tx.partner_reference))
        # check seller
        if data.get('receiver_id') and tx.acquirer_id.paypal_seller_account and data['receiver_id'] != tx.acquirer_id.paypal_seller_account:
            invalid_parameters.append(('receiver_id', data.get('receiver_id'), tx.acquirer_id.paypal_seller_account))
        if not data.get('receiver_id') or not tx.acquirer_id.paypal_seller_account:
            # Check receiver_email only if receiver_id was not checked.
            # In Paypal, this is possible to configure as receiver_email a different email than the business email (the login email)
            # In Odoo, there is only one field for the Paypal email: the business email. This isn't possible to set a receiver_email
            # different than the business email. Therefore, if you want such a configuration in your Paypal, you are then obliged to fill
            # the Merchant ID in the Paypal payment acquirer in Odoo, so the check is performed on this variable instead of the receiver_email.
            # At least one of the two checks must be done, to avoid fraudsters.
            if data.get('receiver_email') != tx.acquirer_id.paypal_email_account:
                invalid_parameters.append(('receiver_email', data.get('receiver_email'), tx.acquirer_id.paypal_email_account))
        return invalid_parameters

    def _paypal_form_validate(self, cr, uid, tx, data, context=None):
        """Write the tx state derived from the IPN 'payment_status'."""
        status = data.get('payment_status')
        # NOTE(review): 'data' is rebound here, so the later
        # data.get('payment_date') reads from the NEW dict (always missing) and
        # falls back to fields.datetime.now() — confirm this is intended.
        data = {
            'acquirer_reference': data.get('txn_id'),
            'paypal_txn_type': data.get('payment_type'),
            'partner_reference': data.get('payer_id')
        }
        if status in ['Completed', 'Processed']:
            _logger.info('Validated Paypal payment for tx %s: set as done' % (tx.reference))
            data.update(state='done', date_validate=data.get('payment_date', fields.datetime.now()))
            return tx.write(data)
        elif status in ['Pending', 'Expired']:
            _logger.info('Received notification for Paypal payment %s: set as pending' % (tx.reference))
            data.update(state='pending', state_message=data.get('pending_reason', ''))
            return tx.write(data)
        else:
            error = 'Received unrecognized status for Paypal payment %s: %s, set as error' % (tx.reference, status)
            _logger.info(error)
            data.update(state='error', state_message=error)
            return tx.write(data)

    # --------------------------------------------------
    # SERVER2SERVER RELATED METHODS
    # --------------------------------------------------

    def _paypal_try_url(self, request, tries=3, context=None):
        """ Try to contact Paypal. Due to some issues, internal service errors
        seem to be quite frequent. Several tries are done before considering
        the communication as failed.

        .. versionadded:: pre-v8 saas-3
        .. warning:: Experimental code. You should not use it before
            OpenERP v8 official release.
        """
        done, res = False, None
        while (not done and tries):
            try:
                res = urllib2.urlopen(request)
                done = True
            except urllib2.HTTPError as e:
                res = e.read()
                e.close()
                # Only internal service errors are worth retrying.
                if tries and res and json.loads(res)['name'] == 'INTERNAL_SERVICE_ERROR':
                    _logger.warning('Failed contacting Paypal, retrying (%s remaining)' % tries)
            tries = tries - 1
        if not res:
            pass
            # raise openerp.exceptions.
        result = res.read()
        res.close()
        return result

    def _paypal_s2s_send(self, cr, uid, values, cc_values, context=None):
        """
        .. versionadded:: pre-v8 saas-3
        .. warning:: Experimental code. You should not use it before Ope
# Copyright (C) 2003-2009  Robey Pointer <robeypointer@gmail.com>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA.

"""
A stub SFTP server for loopback SFTP testing.
"""

import os

from paramiko import ServerInterface, SFTPServerInterface, SFTPServer, SFTPAttributes, \
    SFTPHandle, SFTP_OK, AUTH_SUCCESSFUL, AUTH_FAILED, OPEN_SUCCEEDED, RSAKey
from paramiko.common import o666

from pysftpserver.tests.utils import t_path

# Fixed test credentials used by the loopback tests.
USERNAME = "test"
PASSWORD = "secret"
RSA_KEY = t_path("id_rsa")
SERVER_ROOT = "server_root"


class StubServer (ServerInterface):
    """SSH server stub accepting one fixed password or public key."""

    good_pub_key = RSAKey(filename=RSA_KEY)

    def check_auth_password(self, username, password):
        # Accept only the fixed test credentials.
        if username == USERNAME and password == PASSWORD:
            return AUTH_SUCCESSFUL
        return AUTH_FAILED

    def check_auth_publickey(self, username, key):
        # Accept only the bundled test RSA key.
        if username == USERNAME and key == self.good_pub_key:
            return AUTH_SUCCESSFUL
        return AUTH_FAILED

    def check_channel_request(self, kind, chanid):
        # Allow every channel type; this server exists only for tests.
        return OPEN_SUCCEEDED


class StubSFTPHandle (SFTPHandle):
    """Open-file handle that maps OSErrors to SFTP error codes."""

    def stat(self):
        try:
            return SFTPAttributes.from_stat(os.fstat(self.readfile.fileno()))
        except OSError as e:
            return SFTPServer.convert_errno(e.errno)

    def chattr(self, attr):
        # python doesn't have equivalents to fchown or fchmod, so we have to
        # use the stored filename
        try:
            SFTPServer.set_file_attr(self.filename, attr)
            return SFTP_OK
        except OSError as e:
            return SFTPServer.convert_errno(e.errno)


class StubSFTPServer (SFTPServerInterface):
    """SFTP server stub rooted at a test directory; every operation maps
    OSError to the corresponding SFTP status code."""

    # assume current folder is a fine root
    ROOT = t_path(SERVER_ROOT)

    def _realpath(self, path):
        # Map a client path onto the server-root sandbox.
        return self.ROOT + self.canonicalize(path)

    def list_folder(self, path):
        path = self._realpath(path)
        try:
            out = []
            flist = os.listdir(path)
            for fname in flist:
                attr = SFTPAttributes.from_stat(
                    os.lstat(os.path.join(path, fname)))
                attr.filename = fname
                out.append(attr)
            return out
        except OSError as e:
            return SFTPServer.convert_errno(e.errno)

    def stat(self, path):
        path = self._realpath(path)
        try:
            return SFTPAttributes.from_stat(os.stat(path))
        except OSError as e:
            return SFTPServer.convert_errno(e.errno)

    def lstat(self, path):
        path = self._realpath(path)
        try:
            return SFTPAttributes.from_stat(os.lstat(path))
        except OSError as e:
            return SFTPServer.convert_errno(e.errno)

    def open(self, path, flags, attr):
        path = self._realpath(path)
        try:
            binary_flag = getattr(os, 'O_BINARY', 0)
            flags |= binary_flag
            mode = getattr(attr, 'st_mode', None)
            if mode is not None:
                fd = os.open(path, flags, mode)
            else:
                # os.open() defaults to 0777 which is
                # an odd default mode for files
                fd = os.open(path, flags, o666)
        except OSError as e:
            return SFTPServer.convert_errno(e.errno)
        if (flags & os.O_CREAT) and (attr is not None):
            attr._flags &= ~attr.FLAG_PERMISSIONS
            SFTPServer.set_file_attr(path, attr)
        # Translate the low-level flags into a stdio mode string.
        if flags & os.O_WRONLY:
            if flags & os.O_APPEND:
                fstr = 'ab'
            else:
                fstr = 'wb'
        elif flags & os.O_RDWR:
            if flags & os.O_APPEND:
                fstr = 'a+b'
            else:
                fstr = 'r+b'
        else:
            # O_RDONLY (== 0)
            fstr = 'rb'
        try:
            f = os.fdopen(fd, fstr)
        except OSError as e:
            return SFTPServer.convert_errno(e.errno)
        fobj = StubSFTPHandle(flags)
        fobj.filename = path
        fobj.readfile = f
        fobj.writefile = f
        return fobj

    def remove(self, path):
        path = self._realpath(path)
        try:
            os.remove(path)
        except OSError as e:
            return SFTPServer.convert_errno(e.errno)
        return SFTP_OK

    def rename(self, oldpath, newpath):
        oldpath = self._realpath(oldpath)
        newpath = self._realpath(newpath)
        try:
            os.rename(oldpath, newpath)
        except OSError as e:
            return SFTPServer.convert_errno(e.errno)
        return SFTP_OK

    def mkdir(self, path, attr):
        path = self._realpath(path)
        try:
            os.mkdir(path)
            if attr is not None:
                SFTPServer.set_file_attr(path, attr)
        except OSError as e:
            return SFTPServer.convert_errno(e.errno)
        return SFTP_OK

    def rmdir(self, path):
        path = self._realpath(path)
        try:
            os.rmdir(path)
        except OSError as e:
            return SFTPServer.convert_errno(e.errno)
        return SFTP_OK

    def chattr(self, path, attr):
        path = self._realpath(path)
        try:
            SFTPServer.set_file_attr(path, attr)
        except OSError as e:
            return SFTPServer.convert_errno(e.errno)
        return SFTP_OK

    def symlink(self, target_path, path):
        path = self._realpath(path)
        if (len(target_path) > 0) and (target_path[0] == '/'):
            # absolute symlink
            target_path = os.path.join(self.ROOT, target_path[1:])
        try:
            os.symlink(target_path, path)
        except OSError as e:
            return SFTPServer.convert_errno(e.errno)
        return SFTP_OK

    def readlink(self, path):
        path = self._realpath(path)
        try:
            symlink = os.readlink(path)
        except OSError as e:
            return SFTPServer.convert_errno(e.errno)
        # if it's absolute, remove the root
        if os.path.isabs(symlink):
            if symlink[:len(self.ROOT)] == self.ROOT:
                symlink = symlink[len(self.ROOT):]
                if (len(symlink) == 0) or (symlink[0] != '/'):
                    symlink = '/' + symlink
            else:
                symlink = '<error>'
        return symlink
, tmax, tstep): times = numpy.array(range(tmin, tmax, tstep)) spike_ids = sorted(spike_list) possible_neurons = range(min_idx, max_idx) spikeArray = dict([(neuron, times) for neuron in spike_ids if neuron in possible_neurons]) return spikeArray def convert_file_to_spikes(input_file_name, min_idx=None, max_idx=None, tmin=None, tmax=None, compatible_input=True): data = numpy.array(numpy.loadtxt(fname=input_file_name), dtype=int) # get the array from the original text file if compatible_input: data = numpy.roll(data, 1, axis=1) # swap neuron ID and time if necessary if min_idx is None: min_idx = numpy.fmin.reduce(data[:,0], 0) if max_idx is None: max_idx = numpy.fmax.reduce(data[:,0], 0) + 1 if tmin is None: tmin = numpy.fmin.reduce(data[:,1], 0) if tmax is None: tmax = numpy.fmax.reduce(data[:,1], 0) data = data[(data[:,1]>=tmin) & (data[:,1]<tmax) & (data[:,0]>=min_idx) & (data[:,0]<max_idx),:] # filter by mins and maxes if data.shape == (0,): return {} # nothing left: return an empty dict. sort_keys = numpy.lexsort((data[:,1], data[:,0])) # otherwise sort, grouping by neuron ID then time. data = data[sort_keys,:] spiking_neurons = itertools.groupby(data, lambda x: x[0]) # and taking one group at a time#, spikeArray = dict([(neuron[0], numpy.array([spike_time[1] for spike_time in neuron[1]])) for neuron in spiking_neurons]) # create a dictionary indexed by neuron number of the spike times. 
return spikeArray def loop_array(input_array, runtime=0, num_repeats=1, sampletime=0): spikeArray = {} for neuron in input_array: if not sampletime: sampletime = int(numpy.fmax.reduce(input_array[neuron],0)) last_array = numpy.array([]) if sampletime*num_repeats < runtime or (runtime > 0 and sampletime*num_repeats > runtime): num_repeats = runtime/sampletime last_array = input_array[neuron][input_array[neuron] <= (runtime%sampletime)] spikeArray[neuron] = numpy.concatenate([input_array[neuron]+repeat*sampletime for repeat in range(num_repeats)]) if len(last_array): spikeArray[neuron] = numpy.concatenate([spikeArray[neuron], last_array]) return spikeArray def splice_arrays(input_arrays, input_times=None, input_neurons=None): spikeArray = {} if input_neurons is None: input_neurons = [None]*len(input_arrays) if input_times is None: input_times = [[(reduce(lambda x, y: min(x, numpy.fmin.reduce(y,0)), input_group.values(), 0), reduce(lambda x, y: max(x, numpy.fmax.reduce(y,0)), input_group.values(), 0))] for input_group in input_arrays] for in_idx in range(len(input_arrays)): for neuron in input_arrays[in_idx].items(): if input_neurons[in_idx] is None or neuron[0] in input_neurons[in_idx]: for time_range in input_times[in_idx]: if time_range is None: time_range = (reduce(lambda x, y: min(x, numpy.fmin.reduce(y,0)), input_arrays[in_idx].values(), 0), reduce(lambda x, y: max(x, numpy.fmax.reduce(y,0)), input_arrays[in_idx].values(), 0)) if neuron[0] in spikeArray: spikeArray[neuron[0]].extend([time for time in neuron[1] if time >= time_range[0] and time < time_range[1]]) else: spikeArray[neuron[0]] = [time for time in neuron[1] if time >= time_range[0] and time < time_range[1]] for neuron in spikeArray.items(): spikeArray[neuron[0]] = numpy.sort(numpy.unique(numpy.array(neuron[1]))) return spikeArray def splice_files(input_files, input_times=None, input_neurons=None, compat
ible_input=True): # splice_files expects a list of files, a list of lists, one for each file, giving the onset # and offset times for each file, and a list of neuro
ns relevant to each file, which will be # spliced together into a single spike list. spikeArray = {} if input_times is None: input_times = [[(None, None)] for file_idx in len(input_files)] for file_idx in len(input_files): if input_neurons is None or input_neurons[file_idx] is None: max_neuron_id = numpy.fmax.reduce(input_files[file_idx].keys(), 0) + 1 min_neuron_id = numpy.fmin.reduce(input_files[file_idx].keys(), 0) else: max_neuron_id = numpy.fmax.reduce(input_neurons[file_idx], 0) + 1 min_neuron_id = numpy.fmin.reduce(input_neurons[file_idx], 0) for time_range in input_times[file_idx]: for neuron in convert_file_to_spikes(input_file_name=input_files[file_idx], min_idx=min_neuron_id, max_idx=max_neuron_id, tmin=time_range[0], tmax=time_range[1], compatible_input=compatible_input).items(): if neuron[0] in spikeArray: spikeArray[neuron[0]].append(neuron[1]) else: spikeArray[neuron[0]] = neuron[1] for neuron in spikeArray.items(): spikeArray[neuron[0]] = numpy.sort(numpy.unique(numpy.array(neuron[1]))) return spikeArray def subsample_spikes_by_time(spikeArray, start, stop, step): subsampledArray = {} for neuron in spikeArray: times = numpy.sort(spikeArray[neuron][(spikeArray[neuron] >= start) & (spikeArray[neuron] < stop)]) interval = step/2 + step%2 t_now = times[0] t_start = times[0] t_last = len(times) t_index = 0 subsampled_times = [] while t_index < t_last: spikes_in_interval = 0 while t_index < t_last and times[t_index] <= t_start + interval: spikes_in_interval += 1 if spikes_in_interval >= interval: t_start = times[t_index] + interval subsampled_times.append(times[t_index]) try: t_index = next(i for i in range(t_index, t_last) if times[i] >= t_start) except StopIteration: t_index = t_last break t_index += 1 else: if t_index < t_last: t_start = times[t_index] subsampledArray[neuron] = numpy.array(subsampled_times) return subsampledArray def random_skew_times(spikeArray, skewtime, seed=3425670): random.seed(seed) #return dict([(neuron, 
[int(abs(t+random.uniform(-skewtime, skewtime))) for t in spikeArray[neuron]]) for neuron in spikeArray]) spikeDict = dict([(neuron, numpy.array(numpy.fabs(spikeArray[neuron]+numpy.random.uniform(-skewtime, skewtime, len(spikeArray[neuron]))), dtype=int)) for neuron in spikeArray]) #test_out = open('spikeArray.txt', 'w') #test_out.write('%s' % spikeDict) #test_out.close() return spikeDict def generate_shadow_spikes(spikeArray, dim_x, dim_y, move_velocity): """ generate a second set of spikes as if coming from a DVS retina. imagines that the offset pixels perfectly register with perfect timing precision. args: spikeArray, in the format above for an array of Spikes: a dict of {id: [times]} dim_x, size of the field in the x-dimension dim_y, size of the field in the y-dimension move_velocity: an (s, theta) tuple, where s is the speed measured in pixels/ms, theta the angle of virtual movement measured in radians anticlockwise (0 is horizontal movement to the right). The function will displace the shadow by s pixels in the reverse of direction indicated for each time point where spikes are registered. It will add one last set of spikes at time tmax+1, at position of the source spikes at time tmax. """ motion_x = -move_velocity[0]*math.cos(move_velocity[1]) motion_y = -move_velocity[0]*math.sin(move_velocity[1]) spikeArray_out = dict([(int(motion_x+spike[0]%dim_x)+dim_x*int(motion_y+spike[0]/dim_x), spike[1][1:]) for spike in spikeArray.items() if len(spike[1]) > 1]) spikeArray_out = dict([item for item in spikeArray_out.items()
# Build script for the fast_likelihood Cython extension (OpenMP-enabled C++).
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import numpy as np
import os

sourcefiles = ['fast_likelihood.pyx']

ext_modules = [Extension(
    "fast_likelihood",
    sourcefiles,
    # NumPy headers are needed because the .pyx uses the NumPy C API.
    include_dirs=[np.get_include()],
    extra_compile_args=['-O3', '-fopenmp'],
    # BUG FIX: '-lc++' is a *linker* flag, not a compile flag -- passing it to
    # the compiler is at best ignored (and an error under -Werror) and never
    # links the C++ runtime.  Moved to extra_link_args alongside -fopenmp.
    extra_link_args=['-fopenmp', '-lc++'],
    language='c++')]

setup(
    name='fastgmm',
    cmdclass={'build_ext': build_ext},
    ext_modules=ext_modules
)
# Extend the b26 chain to make sure bitcoind isn't accepting b26 b27 = block(27, spend=out[7]) yield rejected(RejectResult(0, b'bad-prevblk')) # Now try a too-large-coinbase script tip(15) b28 = block(28, spend=out[6]) b28.vtx[0].vin[0].scriptSig = b'\x00' * 101 b28.vtx[0].rehash() b28 = update_block(28, []) yield rejected(RejectResult(16, b'bad-cb-length')) # Extend the b28 chain to make sure bitcoind isn't accepting b28 b29 = block(29, spend=out[7]) yield rejected(RejectResult(0, b'bad-prevblk')) # b30 has a max-sized coinbase scriptSig. tip(23) b30 = block(30) b30.vtx[0].vin[0].scriptSig = b'\x00' * 100 b30.vtx[0].rehash() b30 = update_block(30, []) yield accepted() save_spendable_output() # b31 - b35 - check sigops of OP_CHECKMULTISIG / OP_CHECKMULTISIGVERIFY / OP_CHECKSIGVERIFY # # genesis -> ... -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) # \-> b36 (11) # \-> b34 (10) # \-> b32 (9) # # MULTISIG: each op code counts as 20 sigops. To create the edge case, pack another 19 sigops at the end. lots_of_multisigs = CScript([OP_CHECKMULTISIG] * ((MAX_BLOCK_SIGOPS-1) // 20) + [OP_CHECKSIG] * 19) b31 = block(31, spend=out[8], script=lots_of_multisigs) assert_equal(get_legacy_sigopcount_block(b31), MAX_BLOCK_SIGOPS) yield accepted() save_spendable_output() # this goes over the limit because the coinbase has one sigop too_many_multisigs = CScript([OP_CHECKMULTISIG] * (MAX_BLOCK_SIGOPS // 20)) b32 = block(32, spend=out[9], script=too_many_multisigs) assert_equal(get_legacy_sigopcount_block(b32), MAX_BLOCK_SIGOPS + 1) yield rejected(RejectResult(16, b'bad-blk-sigops')) # CHECKMULTISIGVERIFY tip(31) lots_of_multisigs = CScript([OP_CHECKMULTISIGVERIFY] * ((MAX_BLOCK_SIGOPS-1) // 20) + [OP_CHECKSIG] * 19) block(33, spend=out[9], script=lots_of_multisigs) yield accepted() save_spendable_output() too_many_multisigs = CScript([OP_CHECKMULTISIGVERIFY] * (MAX_BLOCK_SIGOPS // 20)) blo
ck(34, spend=out[10], script=too_many_multisigs) yield rejected(RejectResult(16, b'bad-blk-sigops')) # CHECKSIGVERIFY tip(33) lots_of_checksigs = CScript([OP_CHECKSIGVERIFY] * (MAX_BLOCK_SIGOPS - 1)) b35 = block(35, spend=out[10], script=lots_of_checksigs) yield accepted() save_s
pendable_output() too_many_checksigs = CScript([OP_CHECKSIGVERIFY] * (MAX_BLOCK_SIGOPS)) block(36, spend=out[11], script=too_many_checksigs) yield rejected(RejectResult(16, b'bad-blk-sigops')) # Check spending of a transaction in a block which failed to connect # # b6 (3) # b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) # \-> b37 (11) # \-> b38 (11/37) # # save 37's spendable output, but then double-spend out11 to invalidate the block tip(35) b37 = block(37, spend=out[11]) txout_b37 = PreviousSpendableOutput(b37.vtx[1], 0) tx = create_and_sign_tx(out[11].tx, out[11].n, 0) b37 = update_block(37, [tx]) yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) # attempt to spend b37's first non-coinbase tx, at which point b37 was still considered valid tip(35) block(38, spend=txout_b37) yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) # Check P2SH SigOp counting # # # 13 (4) -> b15 (5) -> b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b41 (12) # \-> b40 (12) # # b39 - create some P2SH outputs that will require 6 sigops to spend: # # redeem_script = COINBASE_PUBKEY, (OP_2DUP+OP_CHECKSIGVERIFY) * 5, OP_CHECKSIG # p2sh_script = OP_HASH160, ripemd160(sha256(script)), OP_EQUAL # tip(35) b39 = block(39) b39_outputs = 0 b39_sigops_per_output = 6 # Build the redeem script, hash it, use hash to create the p2sh script redeem_script = CScript([self.coinbase_pubkey] + [OP_2DUP, OP_CHECKSIGVERIFY]*5 + [OP_CHECKSIG]) redeem_script_hash = hash160(redeem_script) p2sh_script = CScript([OP_HASH160, redeem_script_hash, OP_EQUAL]) # Create a transaction that spends one satoshi to the p2sh_script, the rest to OP_TRUE # This must be signed because it is spending a coinbase spend = out[11] tx = create_tx(spend.tx, spend.n, 1, p2sh_script) tx.vout.append(CTxOut(spend.tx.vout[spend.n].nValue - 1, CScript([OP_TRUE]))) self.sign_tx(tx, spend.tx, spend.n) tx.rehash() b39 = update_block(39, [tx]) b39_outputs 
+= 1 # Until block is full, add tx's with 1 satoshi to p2sh_script, the rest to OP_TRUE tx_new = None tx_last = tx total_size=len(b39.serialize()) while(total_size < MAX_BLOCK_BASE_SIZE): tx_new = create_tx(tx_last, 1, 1, p2sh_script) tx_new.vout.append(CTxOut(tx_last.vout[1].nValue - 1, CScript([OP_TRUE]))) tx_new.rehash() total_size += len(tx_new.serialize()) if total_size >= MAX_BLOCK_BASE_SIZE: break b39.vtx.append(tx_new) # add tx to block tx_last = tx_new b39_outputs += 1 b39 = update_block(39, []) yield accepted() save_spendable_output() # Test sigops in P2SH redeem scripts # # b40 creates 3333 tx's spending the 6-sigop P2SH outputs from b39 for a total of 19998 sigops. # The first tx has one sigop and then at the end we add 2 more to put us just over the max. # # b41 does the same, less one, so it has the maximum sigops permitted. # tip(39) b40 = block(40, spend=out[12]) sigops = get_legacy_sigopcount_block(b40) numTxes = (MAX_BLOCK_SIGOPS - sigops) // b39_sigops_per_output assert_equal(numTxes <= b39_outputs, True) lastOutpoint = COutPoint(b40.vtx[1].sha256, 0) new_txs = [] for i in range(1, numTxes+1): tx = CTransaction() tx.vout.append(CTxOut(1, CScript([OP_TRUE]))) tx.vin.append(CTxIn(lastOutpoint, b'')) # second input is corresponding P2SH output from b39 tx.vin.append(CTxIn(COutPoint(b39.vtx[i].sha256, 0), b'')) # Note: must pass the redeem_script (not p2sh_script) to the signature hash function (sighash, err) = SignatureHash(redeem_script, tx, 1, SIGHASH_ALL) sig = self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL])) scriptSig = CScript([sig, redeem_script]) tx.vin[1].scriptSig = scriptSig tx.rehash() new_txs.append(tx) lastOutpoint = COutPoint(tx.sha256, 0) b40_sigops_to_fill = MAX_BLOCK_SIGOPS - (numTxes * b39_sigops_per_output + sigops) + 1 tx = CTransaction() tx.vin.append(CTxIn(lastOutpoint, b'')) tx.vout.append(CTxOut(1, CScript([OP_CHECKSIG] * b40_sigops_to_fill))) tx.rehash() new_txs.append(tx) update_block(40, new_txs) yield 
rejected(RejectResult(16, b'bad-blk-sigops')) # same as b40, but one less sigop tip(39) b41 = block(41, spend=None) update_block(41, b40.vtx[1:-1]) b41_sigops_to_fill
# encoding: utf-8
# South schema migration creating the initial waffle tables
# (Flag, its M2M join tables, and Switch).
import datetime
import django
from south.db import db
from south.v2 import SchemaMigration
from django.db import models

# Django 1.5+ compatibility: resolve the (possibly custom) user model.
if django.VERSION >= (1, 5):
    from django.contrib.auth import get_user_model
else:
    from django.contrib.auth.models import User

    def get_user_model():
        # Pre-1.5 Django has no get_user_model(); fall back to auth.User.
        return User


class Migration(SchemaMigration):
    """Initial waffle migration: creates waffle_flag, its group/user M2M
    tables, and waffle_switch."""

    def forwards(self, orm):
        # Adding model 'Flag'
        db.create_table('waffle_flag', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=100)),
            ('everyone', self.gf('django.db.models.fields.NullBooleanField')(null=True, blank=True)),
            ('percent', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=3, decimal_places=1, blank=True)),
            ('superusers', self.gf('django.db.models.fields.BooleanField')(default=True)),
            ('staff', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('authenticated', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('rollout', self.gf('django.db.models.fields.BooleanField')(default=False)),
        ))
        db.send_create_signal('waffle', ['Flag'])

        # Adding M2M table for field groups on 'Flag'
        db.create_table('waffle_flag_groups', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('flag', models.ForeignKey(orm['waffle.flag'], null=False)),
            ('group', models.ForeignKey(orm['auth.group'], null=False))
        ))
        db.create_unique('waffle_flag_groups', ['flag_id', 'group_id'])

        # Adding M2M table for field users on 'Flag'
        db.create_table('waffle_flag_users', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('flag', models.ForeignKey(orm['waffle.flag'], null=False)),
            # Uses the swappable user model resolved above, not orm['auth.user'].
            ('user', models.ForeignKey(get_user_model(), null=False))
        ))
        db.create_unique('waffle_flag_users', ['flag_id', 'user_id'])

        # Adding model 'Switch'
        db.create_table('waffle_switch', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=100)),
            ('active', self.gf('django.db.models.fields.BooleanField')(default=False)),
        ))
        db.send_create_signal('waffle', ['Switch'])

    def backwards(self, orm):
        # Deleting model 'Flag'
        db.delete_table('waffle_flag')

        # Removing M2M table for field groups on 'Flag'
        db.delete_table('waffle_flag_groups')

        # Removing M2M table for field users on 'Flag'
        db.delete_table('waffle_flag_users')

        # Deleting model 'Switch'
        db.delete_table('waffle_switch')

    # Frozen ORM description used by South to reconstruct model state.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'waffle.flag': {
            'Meta': {'object_name': 'Flag'},
            'authenticated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'everyone': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'percent': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '3', 'decimal_places': '1', 'blank': 'True'}),
            'rollout': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'superusers': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'waffle.switch': {
            'Meta': {'object_name': 'Switch'},
            'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
        }
    }

    complete_apps = ['waffle']
# -*- Mode: Python; py-indent-offset: 4 -*-
# pygobject - Python bindings for the GObject library
# Copyright (C) 2006-2012 Johan Dahlin
#
# glib/__init__.py: initialisation file for glib module
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301
# USA

"""Public glib package: re-exports the names provided by the C extension
module ``_glib`` so callers can use ``glib.MainLoop`` etc. directly."""

from . import _glib

# Internal API
_PyGLib_API = _glib._PyGLib_API

# Types
GError = _glib.GError
IOChannel = _glib.IOChannel
Idle = _glib.Idle
MainContext = _glib.MainContext
MainLoop = _glib.MainLoop
OptionContext = _glib.OptionContext
OptionGroup = _glib.OptionGroup
Pid = _glib.Pid
PollFD = _glib.PollFD
Source = _glib.Source
Timeout = _glib.Timeout

# Constants
IO_ERR = _glib.IO_ERR
IO_FLAG_APPEND = _glib.IO_FLAG_APPEND
IO_FLAG_GET_MASK = _glib.IO_FLAG_GET_MASK
IO_FLAG_IS_READABLE = _glib.IO_FLAG_IS_READABLE
IO_FLAG_IS_SEEKABLE = _glib.IO_FLAG_IS_SEEKABLE
IO_FLAG_IS_WRITEABLE = _glib.IO_FLAG_IS_WRITEABLE
IO_FLAG_MASK = _glib.IO_FLAG_MASK
IO_FLAG_NONBLOCK = _glib.IO_FLAG_NONBLOCK
IO_FLAG_SET_MASK = _glib.IO_FLAG_SET_MASK
IO_HUP = _glib.IO_HUP
IO_IN = _glib.IO_IN
IO_NVAL = _glib.IO_NVAL
IO_OUT = _glib.IO_OUT
IO_PRI = _glib.IO_PRI
IO_STATUS_AGAIN = _glib.IO_STATUS_AGAIN
IO_STATUS_EOF = _glib.IO_STATUS_EOF
IO_STATUS_ERROR = _glib.IO_STATUS_ERROR
IO_STATUS_NORMAL = _glib.IO_STATUS_NORMAL
OPTION_ERROR = _glib.OPTION_ERROR
OPTION_ERROR_BAD_VALUE = _glib.OPTION_ERROR_BAD_VALUE
OPTION_ERROR_FAILED = _glib.OPTION_ERROR_FAILED
OPTION_ERROR_UNKNOWN_OPTION = _glib.OPTION_ERROR_UNKNOWN_OPTION
OPTION_FLAG_FILENAME = _glib.OPTION_FLAG_FILENAME
OPTION_FLAG_HIDDEN = _glib.OPTION_FLAG_HIDDEN
OPTION_FLAG_IN_MAIN = _glib.OPTION_FLAG_IN_MAIN
OPTION_FLAG_NOALIAS = _glib.OPTION_FLAG_NOALIAS
OPTION_FLAG_NO_ARG = _glib.OPTION_FLAG_NO_ARG
OPTION_FLAG_OPTIONAL_ARG = _glib.OPTION_FLAG_OPTIONAL_ARG
OPTION_FLAG_REVERSE = _glib.OPTION_FLAG_REVERSE
OPTION_REMAINING = _glib.OPTION_REMAINING
PRIORITY_DEFAULT = _glib.PRIORITY_DEFAULT
PRIORITY_DEFAULT_IDLE = _glib.PRIORITY_DEFAULT_IDLE
PRIORITY_HIGH = _glib.PRIORITY_HIGH
PRIORITY_HIGH_IDLE = _glib.PRIORITY_HIGH_IDLE
PRIORITY_LOW = _glib.PRIORITY_LOW
SPAWN_CHILD_INHERITS_STDIN = _glib.SPAWN_CHILD_INHERITS_STDIN
SPAWN_DO_NOT_REAP_CHILD = _glib.SPAWN_DO_NOT_REAP_CHILD
SPAWN_FILE_AND_ARGV_ZERO = _glib.SPAWN_FILE_AND_ARGV_ZERO
SPAWN_LEAVE_DESCRIPTORS_OPEN = _glib.SPAWN_LEAVE_DESCRIPTORS_OPEN
SPAWN_SEARCH_PATH = _glib.SPAWN_SEARCH_PATH
SPAWN_STDERR_TO_DEV_NULL = _glib.SPAWN_STDERR_TO_DEV_NULL
SPAWN_STDOUT_TO_DEV_NULL = _glib.SPAWN_STDOUT_TO_DEV_NULL
USER_DIRECTORY_DESKTOP = _glib.USER_DIRECTORY_DESKTOP
USER_DIRECTORY_DOCUMENTS = _glib.USER_DIRECTORY_DOCUMENTS
USER_DIRECTORY_DOWNLOAD = _glib.USER_DIRECTORY_DOWNLOAD
USER_DIRECTORY_MUSIC = _glib.USER_DIRECTORY_MUSIC
USER_DIRECTORY_PICTURES = _glib.USER_DIRECTORY_PICTURES
USER_DIRECTORY_PUBLIC_SHARE = _glib.USER_DIRECTORY_PUBLIC_SHARE
USER_DIRECTORY_TEMPLATES = _glib.USER_DIRECTORY_TEMPLATES
USER_DIRECTORY_VIDEOS = _glib.USER_DIRECTORY_VIDEOS

# Functions
child_watch_add = _glib.child_watch_add
filename_display_basename = _glib.filename_display_basename
filename_display_name = _glib.filename_display_name
filename_from_utf8 = _glib.filename_from_utf8
find_program_in_path = _glib.find_program_in_path
get_application_name = _glib.get_application_name
get_current_time = _glib.get_current_time
get_prgname = _glib.get_prgname
get_system_config_dirs = _glib.get_system_config_dirs
get_system_data_dirs = _glib.get_system_data_dirs
get_user_cache_dir = _glib.get_user_cache_dir
get_user_config_dir = _glib.get_user_config_dir
get_user_data_dir = _glib.get_user_data_dir
get_user_special_dir = _glib.get_user_special_dir
glib_version = _glib.glib_version
idle_add = _glib.idle_add
io_add_watch = _glib.io_add_watch
main_context_default = _glib.main_context_default
main_depth = _glib.main_depth
markup_escape_text = _glib.markup_escape_text
pyglib_version = _glib.pyglib_version
set_application_name = _glib.set_application_name
set_prgname = _glib.set_prgname
source_remove = _glib.source_remove
spawn_async = _glib.spawn_async
threads_init = _glib.threads_init
timeout_add = _glib.timeout_add
timeout_add_seconds = _glib.timeout_add_seconds
uri_list_extract_uris = _glib.uri_list_extract_uris
"""
HTMLParser-based link extractor
"""
from HTMLParser import HTMLParser
from urlparse import urljoin

from w3lib.url import safe_url_string

from scrapy.link import Link
from scrapy.utils.python import unique as unique_list


class HtmlParserLinkExtractor(HTMLParser):
    """Extract Link objects from HTML using the stdlib HTMLParser.

    `tag`/`attr` select which elements and attributes to scan (either a
    literal name or a predicate callable); `process` transforms each matched
    attribute value; `unique` de-duplicates links by URL.
    Python 2 code: byte/unicode handling below relies on Py2 semantics.
    """

    def __init__(self, tag="a", attr="href", process=None, unique=False):
        HTMLParser.__init__(self)

        # Normalize each selector to a predicate function.
        self.scan_tag = tag if callable(tag) else lambda t: t == tag
        self.scan_attr = attr if callable(attr) else lambda a: a == attr
        self.process_attr = process if callable(process) else lambda v: v
        self.unique = unique

    def _extract_links(self, response_text, response_url, response_encoding):
        # Parse the document, collecting raw links via the handle_* callbacks.
        self.reset()
        self.feed(response_text)
        self.close()

        links = unique_list(self.links, key=lambda link: link.url) if self.unique else self.links

        ret = []
        # Honor a <base href=...> if one was seen during parsing.
        base_url = urljoin(response_url, self.base_url) if self.base_url else response_url
        for link in links:
            if isinstance(link.url, unicode):
                # encode unicode URLs to bytes before joining/escaping
                link.url = link.url.encode(response_encoding)
            link.url = urljoin(base_url, link.url)
            link.url = safe_url_string(link.url, response_encoding)
            link.text = link.text.decode(response_encoding)
            ret.append(link)

        return ret

    def extract_links(self, response):
        # wrapper needed to allow to work directly with text
        return self._extract_links(response.body, response.url, response.encoding)

    def reset(self):
        """Clear parser state; called before each document is fed."""
        HTMLParser.reset(self)

        self.base_url = None
        self.current_link = None
        self.links = []

    def handle_starttag(self, tag, attrs):
        # Record <base href> for URL resolution; collect links from matching tags.
        if tag == 'base':
            self.base_url = dict(attrs).get('href')
        if self.scan_tag(tag):
            for attr, value in attrs:
                if self.scan_attr(attr):
                    url = self.process_attr(value)
                    link = Link(url=url)
                    self.links.append(link)
                    self.current_link = link

    def handle_endtag(self, tag):
        # Leaving a tag ends text accumulation for the current link.
        self.current_link = None

    def handle_data(self, data):
        # First non-empty text inside the link element becomes its anchor text.
        if self.current_link and not self.current_link.text:
            self.current_link.text = data.strip()

    def matches(self, url):
        """This extractor matches with any url, since
        it doesn't contain any patterns"""
        return True
se ValidationError({ 'lag': f"The selected LAG interface ({self.lag}) belongs to {self.lag.device}, which is not part " f"of virtual chassis {self.device.virtual_chassis}." }) # A virtual interface cannot have a parent LAG if self.type == InterfaceTypeChoices.TYPE_VIRTUAL and self.lag is not None: raise ValidationError({'lag': "Virtual interfaces cannot have a parent LAG interface."}) # A LAG interface cannot be its own parent if self.pk and self.lag_id == self.pk: raise ValidationError({'lag': "A LAG interface cannot be its own parent."}) # Validate untagged VLAN if self.untagged_vlan and self.untagged_vlan.site not in [self.parent.site, None]: raise ValidationError({ 'untagged_vlan': "The untagged VLAN ({}) must belong to the same site as the interface's parent " "device, or it must be global".format(self.untagged_vlan) }) @property def parent(self): return self.device @property def is_connectable(self): return self.type not in NONCONNECTABLE_IFACE_TYPES @property def is_virtual(self): return self.type in VIRTUAL_IFACE_TYPES @property def is_wireless(self): return self.type in WIRELESS_IFACE_TYPES @property def is_lag(self): return self.type == InterfaceTypeChoices.TYPE_LAG @property def count_ipaddresses(self): return self.ip_addresses.count() # # Pass-through ports # @extras_features('webhooks', 'custom_links') class FrontPort(CableTermination, ComponentModel): """ A pass-through port on the front of a Device. 
""" type = models.CharField( max_length=50, choices=PortTypeChoices ) rear_port = models.ForeignKey( to='dcim.RearPort', on_delete=models.CASCADE, related_name='frontports' ) rear_port_position = models.PositiveSmallIntegerField( default=1, validators=[ MinValueValidator(REARPORT_POSITIONS_MIN), MaxValueValidator(REARPORT_POSITIONS_MAX) ] ) tags = TaggableManager(through=TaggedItem) csv_headers = ['device', 'name', 'label', 'type', 'rear_port', 'rear_port_position', 'description'] class Meta: ordering = ('device', '_name') unique_together = ( ('device', 'name'), ('rear_port', 'rear_port_position'), ) def get_absolute_url(self): return reverse('dcim:frontport', kwargs={'pk': self.pk}) def to_csv(self): return ( self.device.identifier, self.name, self.label, self.get_type_display(), self.rear_port.name, self.rear_port_position, self.description, ) def clean(self): super().clean() # Validate rear port assignment if self.rear_port.device != self.device: raise ValidationError({ "rear_port": f"Rear port ({self.rear_port}) must belong to the same device" }) # Validate rear port position assignment if self.rear_port_position > self.rear_port.positions: raise ValidationError({ "rear_port_position": f"Invalid rear port position ({self.rear_port_position}): Rear port " f"{self.rear_port.name} has only {self.rear_port.positions} positions" }) @extras_features('webhooks', 'custom_links') class RearPort(CableTermination, ComponentModel): """ A pass-through port on the rear of a Device. 
""" type = models.CharField( max_length=50, choices=PortTypeChoices ) positions = models.PositiveSmallIntegerField( default=1, validators=[ MinValueValidator(REARPORT_POSITIONS_MIN), MaxValueValidator(REARPORT_POSITIONS_MAX) ] ) tags = TaggableManager(through=TaggedItem) csv_headers = ['device', 'name', 'label', 'type', 'positions', 'description'] class Meta: ordering = ('device', '_name') unique_together = ('device', 'name') def get_absolute_url(self): return reverse('dcim:rearport', kwargs={'pk': self.pk}) def clean(self): super().clean() # Check that positions count is greater than or equal to the number of associated FrontPorts frontport_count = self.frontports.count() if self.positions < frontport_count: raise ValidationError({ "positions": f"The number of positions cannot be less than the number of mapped front ports " f"({frontport_count})" }) def to_csv(self): return ( self.device.identifier, self.name, self.label, self.get_type_display(), self.positions, self.description, ) # # Device bays # @extras_features('webhooks', 'custom_links') class DeviceBay(ComponentModel): """ An empty space within a Device which can house a child device """ installed_device = models.OneToOneField( to='dcim.Device', on_delete=models.SET_NULL, related_name='parent_bay', blank=True, null=True ) tags = TaggableManager(through=TaggedItem) csv_headers = ['device', 'name', 'label', 'installed_device', 'description'] class Meta: ordering = ('device', '_name') unique_together = ('device', 'name') def get_absolute_url(self): return reverse('dcim:devicebay', kwargs={'pk': self.pk}) def to_csv(self): return ( self.device.identifier, self.name, self.label, self.installed_device.identifier if self.installed_device else None, self.description, ) def clean(self): super().clean() # Validate that the parent Device can have DeviceBays if not self.device.device_type.is_parent_device: raise ValidationError("This type of device ({}) does not support device bays.".format( self.device.device_type )) 
# Cannot install a device into itself, obviously if self.device == self.installed_device: raise ValidationError("Cannot install a device into itself.") # Check that the installed device is not already installed elsewhere if self.installed_device: current_bay = DeviceBay.objects.filter(installed_device=self.installed_device).first() if current_bay and current_bay != self: raise ValidationError({ 'installed_device': "Cannot install the specified device; device is already installed in {}".format( current_bay ) }) # # Inventory items # @extras_features('export_templates', 'webhooks', 'custom_links') class InventoryItem(MPTTModel, ComponentModel): """ An InventoryItem represents a serialized piece of hardware within a Device, such as a line card or power supply. InventoryItems are used only for inventory purposes. """ parent = TreeForeignKey( to='self', on_delete=models.CASCADE, related_name='child_items', blank=True, null=True, db_index=True ) manufacturer = models.ForeignKey( to='dcim.Manufacturer', on_delete=models.PROTECT, related_name='inventory_items', blank=True, null=True ) part_id = models.CharField( max_length=50, verbose_name='Part ID', blank=True, help_text='Manufacturer-assigned part identifier' )
serial = models.CharField( max_length=50, verbose_name='Serial number', blank=True ) asset_tag = models.CharField( max_length=50, unique=True, blank=True, n
ull=True, verbose_name='Asset tag', help_text='A unique tag used to ident
'''
GameData.py
Last Updated: 3/16/17

Defines the GameData class, the central container for mutable game state:
active frames, input/display configuration, save data, level data and
player data, plus the load/save and level-management helpers that act on it.
'''
import json, os
import numpy as np
import pygame as pg
from GameAssets import GameAssets as ga

class GameData():
    """
    GameData class is used to store game state information.
    """
    def __init__(self):
        '''
        Method initiates game state variables.
        '''
        self.debug = False
        self.game_name = "SpaceManBash"
        self.delta_sum = 0
        self.running = True

        # GameFrame Data
        self.frames = []
        self.frame_current = None

        # Configs
        self.screen_dim = (800, 600)
        self.controls = {
            'LEFT': pg.K_a,
            'RIGHT': pg.K_d,
            'UP': pg.K_w,
            'DOWN': pg.K_s,
            'CROUCH': pg.K_LALT,
            'ATTACK': pg.K_j,
            'ALTATTACK': pg.K_k,
            'JUMP': pg.K_SPACE,
            'SPRINT': pg.K_LSHIFT,
            'PAUSE': pg.K_ESCAPE,
            'ENTER': pg.K_RETURN,
            'HOME': pg.K_h
        }

        # Save Data
        self.saves = []
        self.save_index = None

        # Level Data
        self.levels = []
        self.level_index = 0
        self.level_background = None
        self.level_midground = None
        self.camera_pos = np.array([0.0, 0.0, 0.0, 0.0])
        self.camera_limits = [0.0, 0.0, 0.0, 0.0]
        self.game_objects = []
        self.collisions = {}
        self.level_scripts = []
        self.script_vars = {}

        # Player Data
        self.player_pos = np.array([0.0, 0.0])
        self.player_health = 100

    def switch_frame(self, frame):
        '''
        Method switches current frame to desired frame.
        Instantiates desired frame (looked up by class name in the
        GameFrames module) if not already present in self.frames.

        Param:
        frame ;GameFrame new current frame (class name as str)
        '''
        # Reuse an already-instantiated frame when one exists.
        for f in self.frames:
            if f.__class__.__name__ == frame:
                self.frame_current = f
                return
        # Otherwise instantiate the frame class dynamically by name.
        module = __import__("GameFrames")
        class_ = getattr(module, frame)
        instance = class_(self)
        self.frames.append(instance)
        self.frame_current = self.frames[-1]

    def save_config(self, filename):
        '''
        Method saves game data configurations to file.

        Param:
        filename ;str config filename
        '''
        try:
            with open("../data/" + filename, "w") as f:
                data = {}
                data['controls'] = self.controls
                data['screen_dim'] = self.screen_dim
                json_dump = json.dumps(data)
                f.write(json_dump)
        except Exception as e:
            # Best-effort: report and continue rather than crash the game.
            print("Could Not Save Config:", filename)
            print(e)

    def load_config(self, filename):
        '''
        Method loads game data configurations from file.

        Param:
        filename ;str config filename
        '''
        try:
            with open("../data/" + filename, "r") as f:
                # Each line is a standalone JSON document; later lines win.
                for json_dump in f:
                    data = json.loads(json_dump)
                    self.controls = data['controls']
                    self.screen_dim = data['screen_dim']
        except Exception as e:
            print("Could Not Load Config:", filename)
            print(e)

    def save_save(self, filename):
        '''
        Method saves game data state to save file.

        Param:
        filename ;str save filename
        '''
        try:
            with open("../data/saves/" + filename, "w") as f:
                data = {}
                data["level_index"] = self.level_index
                json_dump = json.dumps(data)
                f.write(json_dump + '\n')
        except Exception as e:
            print("Could Not Save Save Data:", filename)
            print(e)

    def load_save(self, filename):
        '''
        Method loads game data state from save file.

        Param:
        filename ;str save filename
        '''
        try:
            with open("../data/saves/" + filename, "r") as f:
                for json_dump in f:
                    data = json.loads(json_dump)
                    self.level_index = data["level_index"]
        except Exception as e:
            print("Could Not Load Save Data:", filename)
            print(e)

    def load_game_data(self):
        '''
        Method loads all game level data from file. Levels are read in
        sorted filename order so that list index matches level order.
        '''
        for filename in sorted(os.listdir("../data/levels/")):
            if filename.endswith(".lev"):
                try:
                    with open("../data/levels/" + filename, "r") as f:
                        self.levels.append(f.read())
                except Exception as e:
                    print("Could Not Load Game Data:", filename)
                    print(e)

    def load_level(self):
        '''
        Method loads current level: camera setup, game objects
        (instantiated by class name from the GameObjects module),
        music, background/midground assets, and level scripts.
        '''
        try:
            data = json.loads(self.levels[self.level_index])
            self.camera_pos = np.array(data['camera_pos'])
            self.camera_limits = np.array(data['camera_limits'])
            for go in data['game_objects']:
                # go[0] is the GameObjects class name; go[1:] its ctor args.
                module = __import__("GameObjects")
                class_ = getattr(module, go[0])
                instance = class_(go[1:])
                self.add_game_object(instance)
            pg.mixer.music.load("../data/music/" + data['music'])
            pg.mixer.music.set_volume(0.15)
            pg.mixer.music.play(loops=3)
            self.level_background = getattr(ga, data['background'])
            self.level_midground = getattr(ga, data['midground'])
            for script in data['scripts']:
                self.add_level_script(script)
        except Exception as e:
            print("Couldn't Load Level:", self.level_index)
            print(e)

    def reset_level(self):
        '''
        Method resets current level.
        '''
        self.frame_current.level_loaded = False
        self.game_objects = []
        self.collisions = {}
        self.load_level()

    def switch_level(self, index):
        '''
        Method switches level, autosaving progress to save_0.sav.

        Param:
        index ;int index of desired level
        '''
        self.level_index = index
        self.frame_current.level_loaded = False
        self.game_objects = []
        self.collisions = {}
        self.save_save("save_0.sav")
        self.load_level()

    def add_game_object(self, game_object):
        '''
        Method adds game object.

        Param:
        game_object ;GameObject
        '''
        self.game_objects.append(game_object)

    def remove_game_object(self, game_object):
        '''
        Method removes game object.

        Param:
        game_object ;GameObject
        '''
        self.game_objects.remove(game_object)

    def add_level_script(self, script):
        '''
        Method adds script to the current level's script list.

        Param:
        script ;level script entry
        '''
        self.level_scripts.append(script)

    def remove_level_script(self, script):
        '''
        Method removes script from the current level's script list.

        Param:
        script ;level script entry
        '''
        self.level_scripts.remove(script)

    def update_collisions(self):
        '''
        Method calculates collisions of game objects at current game state.
        Collisions are stored in self.collisions dictionary object, mapping
        each game object to the list of objects it currently collides with.
        '''
        self.collisions = {}
        for go in self.game_objects:
            temp = []
            for goo in self.game_objects:
                if go != goo and go.check_collision(goo.rect):
                    temp.append(goo)
            self.collisions[go] = temp

    def center_camera_on_game_object(self, game_object):
        '''
        Method updates camera position to be centered on desired game
        object while remaining in the self.camera_limits boundaries.

        Param:
        game_object ;GameObject
        '''
        x = -(game_object.rect[0] + (game_object.rect[2]/2.0)) + (self.screen_dim[0]/2.0)
        y = -(game_object.rect[1] + (game_object.rect[3]/2.0)) + (self.screen_dim[1]/2.0)
        # Only move the camera while it stays inside the level's limits.
        if x < self.camera_limits[2] and x > self.camera_limits[0]:
            self.camera_pos[0] = x
        if y < self.camera_limits[3] and y > self.camera_limits[1]:
            self.camera_pos[1] = y
# -*- coding: utf-8 -*-

# Third Party Stuff
from rest_framework.pagination import PageNumberPagination as DrfPageNumberPagination


class PageNumberPagination(DrfPageNumberPagination):
    """Project-wide pagination: ?page=N&per_page=M, capped at 1000 per page."""

    # Query parameter the client uses to select a page.
    page_query_param = 'page'

    # Query parameter the client uses to choose a page size.
    # (DRF's default is None, which disables client control entirely.)
    page_size_query_param = 'per_page'

    # Hard upper bound on the page size a client may request;
    # only meaningful because page_size_query_param is enabled above.
    max_page_size = 1000