code
stringlengths
1
199k
import time from transitions import Machine class MessageState(object): states = ['initialised', 'waiting response', 'complete', 'timedout'] transitions = [ {'trigger': 'query', 'source': 'initialised', 'dest': 'waiting response', 'before': '_update', 'after': '_send_query'}, {'trigger': 'response', 'source': 'waiting response', 'dest': 'complete', 'before': '_update', 'after': '_completed'}, {'trigger': 'timeout', 'source': '*', 'dest': 'timedout', 'before': '_update', 'after': '_completed', }, ] def __init__(self, engine, txid, msg, callback=None, max_duration=5000, max_concurrency=3): self.engine = engine self.callback = callback self.machine = Machine(model=self, states=self.states, transitions=self.transitions, initial='initialised') self.start = self.last_change = time.time() * 1000 self.max_duration = max_duration self.max_concurrency = max_concurrency self.txid = txid self.times = {} self.parse_message(msg) self.query() def query(self): pass def parse_message(self, msg): self.val = msg.pop(0) def is_complete(self): return self.state in ['complete', 'timedout'] def pack_request(self): return None @staticmethod def unpack_response(content): return None @staticmethod def pack_response(content): return None def _update(self): now = time.time() * 1000 self.times.setdefault(self.state, 0.0) self.times[self.state] += (now - self.last_change) self.last_change = now def duration(self): return time.time() * 1000 - self.start def latency(self): return self.times.setdefault('waiting response', 0.0) def _send_query(self): pass def _completed(self): pass
from essence3.util import clamp class Align(object): def __init__(self, h, v = None): self.h = h self.v = h if v is None else v def __call__(self, node, edge): if edge in ('top', 'bottom'): return node.width * self.h if edge in ('left', 'right'): return node.height * self.v class FlowAlign(object): def __init__(self, h, v = None): self.h = h self.v = h if v is None else v def __call__(self, node, edge): if edge in ('top', 'bottom'): return node.flowline(edge, self.h) if edge in ('left', 'right'): return node.flowline(edge, self.v) def flow_simple(node, (low, high), edge, which): if which == 0: return low + node.offset1[0] + node[0].flowline(edge, which) if which == 2: return low + node.offset1[-2] + node[-1].flowline(edge, which) i = len(node) / 2 if which == 1: if len(node) % 2 == 1: return low + node.offset1[i] + node[i].flowline(edge, which) else: return low + (node.offset0[i] + node.offset1[i])*0.5 class Box(object): def __init__(self, (left, top, width, height), style): self.left = left self.top = top self.width = width self.height = height self.style = style def flowline(self, edge, which): if edge in ('top', 'bottom'): return self.width * (0.0, 0.5, 1.0)[which] if edge in ('left', 'right'): return self.height * (0.0, 0.5, 1.0)[which] def measure(self, parent): pass def arrange(self, parent, (left,top)): self.left = left self.top = top def render(self): background = self.style['background'] if background: background(self) def pick(self, (x,y), hits): return hits def subintrons(self, res): return res def traverse(self, res, cond): if cond(self): res.append(self) return res class Slate(Box): def __init__(self, (width, height), style): Box.__init__(self, (0, 0, width, height), style) class Label(Box): def __init__(self, source, style): self.source = source Box.__init__(self, (0, 0, 0, 0), style) self.offsets = None def flowline(self, edge, which): left, top, right, bottom = self.style['padding'] if edge in ('top', 'bottom'): return self.width * (0.0, 0.5, 1.0)[which] + left if edge in ('left', 'right'): if which == 0: return top if which == 1: return top + self.style['font'].mathline * self.style['font_size'] if which == 2: return top + self.style['font'].baseline * self.style['font_size'] def measure(self, parent): left, top, right, bottom = self.style['padding'] self.offsets = self.style['font'].measure(self.source, self.style['font_size']) self.width = left + right + self.offsets[-1] self.height = top + bottom + self.style['font'].lineheight * self.style['font_size'] def arrange(self, parent, (left,top)): self.left = left self.top = top def render(self): background = self.style['background'] if background: background(self) self.style['font'](self) def selection_rect(self, start, stop): left, top, right, bottom = self.style['padding'] x0 = self.offsets[start] x1 = self.offsets[stop] return (self.left + left + x0 - 1, self.top, x1-x0 + 2, self.height) def scan_offset(self, (x,y)): left, top, right, bottom = self.style['padding'] x -= self.left + left k = 0 best = abs(x - 0) for index, offset in enumerate(self.offsets): v = abs(x - offset) if v <= best: best = v k = index return k, best ** 2.0 + abs(y - clamp(self.top, self.top + self.height, y)) ** 2.0 class Container(Box): def __init__(self, nodes, style): self.nodes = nodes self.offset0 = [0] * (len(nodes) + 1) self.offset1 = [0] * (len(nodes) + 1) self.flow0 = [0] * len(nodes) self.flow1 = [0] * len(nodes) self.base0 = 0 self.base1 = 0 Box.__init__(self, (0, 0, 0, 0), style) def __getitem__(self, i): return self.nodes[i] def __iter__(self): return iter(self.nodes) def __len__(self): return len(self.nodes) def render(self): background = self.style['background'] if background: background(self) for node in self: node.render() def pick(self, (x,y), hits): for node in self: res = node.pick((x,y), hits) return hits def subintrons(self, res): for node in self: res = node.subintrons(res) return res def traverse(self, res, cond): if cond(self): res.append(self) for node in self: res = node.traverse(res, cond) return res class HBox(Container): def flowline(self, edge, which): left, top, right, bottom = self.style['padding'] if edge == 'left': return top + self.base0 - self.flow0[0] + self[0].flowline(edge, which) elif edge == 'right': return top + self.base1 - self.flow1[-1] + self[-1].flowline(edge, which) else: return self.style['flow'](self, (left, self.width-right), edge, which) def measure(self, parent): offset = cap = 0 low = org = high = 0 for i, node in enumerate(self): node.measure(self) self.offset0[i] = cap self.offset1[i] = offset self.flow0[i] = f0 = self.style['align'](node, 'left') self.flow1[i] = f1 = self.style['align'](node, 'right') low = min(low, 0 - f0) high = max(high, node.height - f0) low += f0 - f1 org += f0 - f1 high += f0 - f1 cap = offset + node.width offset += node.width + self.style['spacing'] self.offset0[len(self)] = self.offset1[len(self)] = cap self.base0 = org - low self.base1 = 0 - low left, top, right, bottom = self.style['padding'] self.width = cap + left + right self.height = high - low + top + bottom def arrange(self, parent, (left,top)): self.left = left self.top = top left, top, right, bottom = self.style['padding'] base_x = self.left + left base_y = self.base0 + self.top + top for i, node in enumerate(self): node.arrange(self, (base_x + self.offset1[i], base_y - self.flow0[i])) base_y += self.flow1[i] - self.flow0[i] def get_spacer(self, i): left, top, right, bottom = self.style['padding'] x0 = self.offset0[i] x1 = self.offset1[i] return self.left + left+x0, self.top + top, x1-x0, self.height-bottom-top class VBox(Container): def flowline(self, edge, which): left, top, right, bottom = self.style['padding'] if edge == 'top': return left + self.base0 - self.flow0[0] + self[0].flowline(edge, which) elif edge == 'bottom': return left + self.base1 - self.flow1[-1] + self[-1].flowline(edge, which) else: return self.style['flow'](self, (top, self.height-bottom), edge, which) def measure(self, parent): offset = cap = 0 low = org = high = 0 for i, node in enumerate(self): node.measure(self) self.offset0[i] = cap self.offset1[i] = offset self.flow0[i] = f0 = self.style['align'](node, 'top') self.flow1[i] = f1 = self.style['align'](node, 'bottom') low = min(low, 0 - f0) high = max(high, node.width - f0) low += f0 - f1 org += f0 - f1 high += f0 - f1 cap = offset + node.height offset += node.height + self.style['spacing'] self.offset0[len(self)] = self.offset1[len(self)] = cap self.base0 = org - low self.base1 = 0 - low left, top, right, bottom = self.style['padding'] self.height = cap + top + bottom self.width = high - low + left + right def arrange(self, parent, (left,top)): self.left = left self.top = top left, top, right, bottom = self.style['padding'] base_x = self.base0 + self.left + left base_y = self.top + top for i, node in enumerate(self): node.arrange(self, (base_x - self.flow0[i], base_y + self.offset1[i])) base_x += self.flow1[i] - self.flow0[i] def get_spacer(self, i): left, top, right, bottom = self.style['padding'] y0 = self.offset0[i] y1 = self.offset1[i] return self.left + left, self.top + y0+top, self.width - right-left, y1-y0 class Intron(Box): def __init__(self, source, index, generator): self.source = source self.index = index self.generator = generator self.rebuild() def rebuild(self): self.node, self.style = self.generator(self.source) def flowline(self, edge, which): left, top, right, bottom = self.style['padding'] if edge in ('left', 'right'): x0 = top if edge in ('top', 'bottom'): x0 = left return x0 + self.node.flowline(edge, which) def measure(self, parent): left, top, right, bottom = self.style['padding'] min_width = self.style['min_width'] min_height = self.style['min_height'] self.node.measure(self) self.width = max(min_width, self.node.width + left + right) self.height = max(min_height, self.node.height + top + bottom) def arrange(self, parent, (left, top)): self.left = left self.top = top left, top, right, bottom = self.style['padding'] inner_width = self.width - left - right inner_height = self.height - top - bottom x = self.left + left + (inner_width - self.node.width)*0.5 y = self.top + top + (inner_height - self.node.height)*0.5 self.node.arrange(self, (x,y)) def render(self): background = self.style['background'] if background: background(self) self.node.render() def pick(self, (x,y), hits=None): if hits == None: hits = [] if 0 <= x - self.left < self.width and 0 <= y - self.top < self.height: hits.append(self) return self.node.pick((x,y), hits) def subintrons(self, res=None): if res == None: return self.node.subintrons([]) else: res.append(self) return res def find_context(self, intron): if intron == self: return () for subintron in self.subintrons(): match = subintron.find_context(intron) if match is not None: return (self,) + match def traverse(self, res, cond): if cond(self): res.append(self) return self.node.traverse(res, cond) def scan_offset(self, (x,y)): left = self.left right = self.left + self.width top = self.top bottom = self.top + self.height b0 = (x - left)**2 + (y - top)**2 b1 = (x - right)**2 + (y - bottom)**2 b = (x - clamp(left, right, x))**2 + (y - clamp(top, bottom, y))**2 if b0 < b1: return self.index, b else: return self.index+1, b def solve(root, (left, top)): root.measure(None) root.arrange(None, (left, top))
import os import sys import logging as log from glob import glob from laniakea import LkModule from laniakea.dud import Dud from laniakea.utils import get_dir_shorthand_for_uuid, random_string from laniakea.db import session_scope, Job, JobResult, JobKind, SourcePackage from laniakea.msgstream import EventEmitter from .rubiconfig import RubiConfig from .utils import safe_rename def accept_upload(conf, dud, event_emitter): ''' Accept the upload and move its data to the right places. ''' job_success = dud.get('X-Spark-Success') == 'Yes' job_id = dud.get('X-Spark-Job') # mark job as accepted and done with session_scope() as session: job = session.query(Job).filter(Job.uuid == job_id).one_or_none() if not job: log.error('Unable to mark job \'{}\' as done: The Job was not found.'.format(job_id)) # this is a weird situation, there is no proper way to handle it as this indicates a bug # in the Laniakea setup or some other oddity. # The least harmful thing to do is to just leave the upload alone and try again later. return job.result = JobResult.SUCCESS if job_success else JobResult.FAILURE job.latest_log_excerpt = None # move the log file and Firehose reports to the log storage log_target_dir = os.path.join(conf.log_storage_dir, get_dir_shorthand_for_uuid(job_id)) firehose_target_dir = os.path.join(log_target_dir, 'firehose') for fname in dud.get_files(): if fname.endswith('.log'): os.makedirs(log_target_dir, exist_ok=True) # move the logfile to its destination and ensure it is named correctly target_fname = os.path.join(log_target_dir, job_id + '.log') safe_rename(fname, target_fname) elif fname.endswith('.firehose.xml'): os.makedirs(firehose_target_dir, exist_ok=True) # move the firehose report to its own directory and rename it fh_target_fname = os.path.join(firehose_target_dir, job_id + '.firehose.xml') safe_rename(fname, fh_target_fname) # handle different job data if job.module == LkModule.ISOTOPE: from .import_isotope import handle_isotope_upload handle_isotope_upload(session, success=job_success, conf=conf, dud=dud, job=job, event_emitter=event_emitter) elif job.kind == JobKind.PACKAGE_BUILD: # the package has been imported by Dak, so we just announce this # event to the world spkg = session.query(SourcePackage) \ .filter(SourcePackage.source_uuid == job.trigger) \ .filter(SourcePackage.version == job.version) \ .one_or_none() if spkg: suite_target_name = '?' if job.data: suite_target_name = job.data.get('suite', '?') event_data = {'pkgname': spkg.name, 'version': job.version, 'architecture': job.architecture, 'suite': suite_target_name, 'job_id': job_id} if job_success: event_emitter.submit_event_for_mod(LkModule.ARCHIVE, 'package-build-success', event_data) else: event_emitter.submit_event_for_mod(LkModule.ARCHIVE, 'package-build-failed', event_data) else: event_emitter.submit_event('upload-accepted', {'job_id': job_id, 'job_failed': not job_success}) # remove the upload description file from incoming os.remove(dud.get_dud_file()) log.info("Upload {} accepted.", dud.get_filename()) def reject_upload(conf, dud, reason='Unknown', event_emitter=None): ''' If a file has issues, we reject it and put it into the rejected queue. ''' os.makedirs(conf.rejected_dir, exist_ok=True) # move the files referenced by the .dud file random_suffix = random_string(4) for fname in dud.get_files(): target_fname = os.path.join(conf.rejected_dir, os.path.basename(fname)) if os.path.isfile(target_fname): target_fname = target_fname + '+' + random_suffix # move the file to the rejected dir safe_rename(fname, target_fname) # move the .dud file itself target_fname = os.path.join(conf.rejected_dir, dud.get_filename()) if os.path.isfile(target_fname): target_fname = target_fname + '+' + random_suffix safe_rename(dud.get_dud_file(), target_fname) # also store the reject reason for future reference with open(target_fname + '.reason', 'w') as f: f.write(reason + '\n') log.info('Upload {} rejected.', dud.get_filename()) if event_emitter: event_emitter.submit_event('upload-rejected', {'dud_filename': dud.get_filename(), 'reason': reason}) def import_files_from(conf, incoming_dir): ''' Import files from an untrusted incoming source. IMPORTANT: We assume that the uploader can not edit their files post-upload. If they could, we would be vulnerable to timing attacks here. ''' emitter = EventEmitter(LkModule.RUBICON) for dud_file in glob(os.path.join(incoming_dir, '*.dud')): dud = Dud(dud_file) try: dud.validate(keyrings=conf.trusted_gpg_keyrings) except Exception as e: reason = 'Signature validation failed: {}'.format(str(e)) reject_upload(conf, dud, reason, emitter) continue # if we are here, the file is good to go accept_upload(conf, dud, emitter) def import_files(options): conf = RubiConfig() if not options.incoming_dir: print('No incoming directory set. Can not process any files.') sys.exit(1) import_files_from(conf, options.incoming_dir)
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('web', '0008_contactphone_place_on_header'), ] operations = [ migrations.AddField( model_name='contactemail', name='place_on_header', field=models.BooleanField(default=False, verbose_name='Размещать в заголовке'), ), migrations.AddField( model_name='contactperson', name='photo', field=models.ImageField(blank=True, null=True, upload_to='', verbose_name='Фото'), ), ]
''' Quick Start Guide For Using This Module ======================================= This module implements a Log Manager class which wraps the Python logging module and provides some utility functions for use with logging. All logging operations should be done through the `LogManager` where available. *DO NOT create objects using the Python logging module, the log manager will be unaware of them.* This module was designed for ease of use while preserving advanced functionality and performance. You must perform the following steps. 1. Import the log_manger module and instantiate *one* `LogManager` instance for your application or library. The `LogManager` is configured via `LogManager.configure()` whose values are easily populated from command line options or a config file. You can modify the configuration again at any point. 2. Create one or more output handlers via `LogManager.create_log_handlers()` an easy to use yet powerful interface. 3. In your code create loggers via `LogManager.get_logger()`. Since loggers are normally bound to a class this method is optimized for that case, all you need to do in the call ``__init__()`` is:: log_mgr.get_logger(self, True) Then emitting messages is as simple as ``self.debug()`` or ``self.error()`` Example: -------- :: # Step 1, Create log manager and configure it prog_name = 'my_app' log_mgr = LogManager(prog_name) log_mgr.configure(dict(verbose=True)) # Step 2, Create handlers log_mgr.create_log_handlers([dict(name='my_app stdout', stream=sys.stdout, level=logging.INFO), dict(name='my_app file', filename='my_app.log', level=logging.DEBUG)]) # Step 3, Create and use a logger in your code class FooBar: def __init__(self, name): log_mgr.get_logger(self, True) self.info("I'm alive! %s", name) foobar = FooBar('Dr. Frankenstein') # Dump the log manager state for illustration print print log_mgr Running the above code would produce:: <INFO>: I'm alive! Dr. Frankenstein root_logger_name: my_app configure_state: None default_level: INFO debug: False verbose: True number of loggers: 2 "my_app" [level=INFO] "my_app.__main__.FooBar" [level=INFO] number of handlers: 2 "my_app file" [level=DEBUG] "my_app stdout" [level=INFO] number of logger regexps: 0 *Note, Steps 1 & 2 were broken out for expository purposes.* You can pass your handler configuration into `LogManager.configure()`. The above could have been simpler and more compact.:: # Step 1 & 2, Create log manager, and configure it and handlers prog_name = 'my_app' log_mgr = LogManager(prog_name) log_mgr.configure(dict(verbose=True, handlers = [dict(name='my_app stdout', stream=sys.stdout, level=logging.INFO), dict(name='my_app file', filename='my_app.log', level=logging.DEBUG)]) FAQ (Frequently Asked Questions) ================================ for the LogManager doesn't seem much different in complexity from basicConfig?** * You get independent logging namespaces. You can instantiate multiple logging namespaces. If you use this module you'll be isolated from other users of the Python logging module avoiding conflicts. * Creating and initializing loggers for classes is trivial. One simple call creates the logger, configures it, and sets logging methods on the class instance. * You can easily configure individual loggers to different levels. For example turn on debuging for just the part of the code you're working on. * The configuration is both simple and powerful. You get many more options than with basicConfig. * You can dynamically reset the logging configuration during execution, you're not forced to live with the config established during program initialization. * The manager optimizes the use of the logging objects, you'll spend less time executing pointless logging code for messages that won't be emitted. * You can see the state of all the logging objects in your namespace from one centrally managed location. * You can configure a LogManager to use the standard logging root logger and get all the benefits of this API. affecting the rest of the logging configuration?** Use a logger regular expression to bind a custom level to loggers whose name matches the regexp. See `LogManager.configure()` for details. Lets say you want to set your Foo.Bar class to debug, then do this:: log_mgr.configure(dict(logger_regexps=[(r'Foo\.Bar', 'debug')])) with a higher level, what happened?** You probably don't have any handlers defined at or below the default_level. The level set on a logger will never be lower than the lowest level handler available to that logger. See above. Logger's will never have a level less than the level of the handlers visible to the logger. If there are no handlers then loggers can't output anything so their level is set to maxsize. at INFO or DEBUG, what happened?** The verbose and debug config flags set the default_level to INFO and DEBUG respectively as a convenience. wrong?** For a message to be emitted the following 3 conditions must hold: * Message level >= logger's level * Message level >= handler's level * The message was not elided by a filter To verify the above conditions hold print out the log manager state (e.g. print log_mgr). Locate your logger, what level is at? Locate the handler you expected to see the message appear on, what level is it? A General Discussion of Python Logging ====================================== The design of this module is driven by how the Python logging module works. The following discussion complements the Python Logging Howto, fills in some missing information and covers strategies for implementing different functionality along with the trade-offs involved. Understanding when & how log messages are emitted: -------------------------------------------------- Loggers provide the application interface for logging. Every logger object has the following methods debug(), info(), warning(), error(), critical(), exception() and log() all of which can accept a format string and arguments. Applications generate logging messages by calling one of these methods to produce a formatted message. A logger's effective level is the first explicitly set level found when searching from the logger through it's ancestors terminating at the root logger. The root logger always has an explicit level (defaults to WARNING). For a message to be emitted by a handler the following must be true: The logger's effective level must >= message level and it must not be filtered by a filter attached to the logger, otherwise the message is discarded. If the message survives the logger check it is passed to a list of handlers. A handler will emit the message if the handler's level >= message level and its not filtered by a filter attached to the handler. The list of handlers is determined thusly: Each logger has a list of handlers (which may be empty). Starting with the logger the message was bound to the message is passed to each of it's handlers. Then the process repeats itself by traversing the chain of loggers through all of it's ancestors until it reaches the root logger. The logger traversal will be terminated if the propagate flag on a logger is False (by default propagate is True). Let's look at a hypothetical logger hierarchy (tree):: A / \\ B D / C There are 4 loggers and 3 handlers Loggers: +-------+---------+---------+-----------+----------+ |Logger | Level | Filters | Propagate | Handlers | +=======+=========+=========+===========+==========+ | A | WARNING | [] | False | [h1,h2] | +-------+---------+---------+-----------+----------+ | A.B | ERROR | [] | False | [h3] | +-------+---------+---------+-----------+----------+ | A.B.C | DEBUG | [] | True | | +-------+---------+---------+-----------+----------+ | A.D | | [] | True | | +-------+---------+---------+-----------+----------+ Handlers: +---------+---------+---------+ | Handler | Level | Filters | +=========+=========+=========+ | h1 | ERROR | [] | +---------+---------+---------+ | h2 | WARNING | [] | +---------+---------+---------+ | h3 | DEBUG | [] | +---------+---------+---------+ Each of the loggers and handlers have empty filter lists in this example thus the filter checks will always pass. If a debug message is posted logger A.B.C the following would happen. The effective level is determined. Since it does not have a level set it's parent (A.B) is examined which has ERROR set, therefore the effective level of A.B.C is ERROR. Processing immediately stops because the logger's level of ERROR does not permit debug messages. If an error message is posted on logger A.B.C it passes the logger level check and filter check therefore the message is passed along to the handlers. The list of handlers on A.B.C is empty so no handlers are called at this position in the logging hierarchy. Logger A.B.C's propagate flag is True so parent logger A.B handlers are invoked. Handler h3's level is DEBUG, it passes both the level and filter check thus h3 emits the message. Processing now stops because logger A.B's propagate flag is False. Now let's see what would happen if a warning message was posted on logger A.D. It's effective level is WARNING because logger A.D does not have a level set, it's only ancestor is logger A, the root logger which has a level of WARNING, thus logger's A.D effective level is WARNING. Logger A.D has no handlers, it's propagate flag is True so the message is passed to it's parent logger A, the root logger. Logger A has two handlers h1 and h2. The level of h1 is ERROR so the warning message is discarded by h1, nothing is emitted by h1. Next handler h2 is invoked, it's level is WARNING so it passes both the level check and the filter check, thus h2 emits the warning message. How to configure independent logging spaces: -------------------------------------------- A common idiom is to hang all handlers off the root logger and set the root loggers level to the desired verbosity. But this simplistic approach runs afoul of several problems, in particular who controls logging (accomplished by configuring the root logger). The usual advice is to check and see if the root logger has any handlers set, if so someone before you has configured logging and you should inherit their configuration, all you do is add your own loggers without any explicitly set level. If the root logger doesn't have handlers set then you go ahead and configure the root logger to your preference. The idea here is if your code is being loaded by another application you want to defer to that applications logging configuration but if your code is running stand-alone you need to set up logging yourself. But sometimes your code really wants it's own logging configuration managed only by yourself completely independent of any logging configuration by someone who may have loaded your code. Even if you code is not designed to be loaded as a package or module you may be faced with this problem. A trivial example of this is running your code under a unit test framework which itself uses the logging facility (remember there is only ever one root logger in any Python process). Fortunately there is a simple way to accommodate this. All you need to do is create a "fake" root in the logging hierarchy which belongs to you. You set your fake root's propagate flag to False, set a level on it and you'll hang your handlers off this fake root. Then when you create your loggers each should be a descendant of this fake root. Now you've completely isolated yourself in the logging hierarchy and won't be influenced by any other logging configuration. As an example let's say your your code is called 'foo' and so you name your fake root logger 'foo'.:: my_root = logging.getLogger('foo') # child of the root logger my_root.propagate = False my_root.setLevel(logging.DEBUG) my_root.addHandler(my_handler) Then every logger you create should have 'foo.' prepended to it's name. If you're logging my module your module's logger would be created like this:: module_logger = logging.getLogger('foo.%s' % __module__) If you're logging by class then your class logger would be:: class_logger = logging.getLogger('foo.%s.%s' % (self.__module__, self.__class__.__name__)) How to set levels: ------------------ An instinctive or simplistic assumption is to set the root logger to a high logging level, for example ERROR. After all you don't want to be spamming users with debug and info messages. Let's also assume you've got two handlers, one for a file and one for the console, both attached to the root logger (a common configuration) and you haven't set the level on either handler (in which case the handler will emit all levels). But now let's say you want to turn on debugging, but just to the file, the console should continue to only emit error messages. You set the root logger's level to DEBUG. The first thing you notice is that you're getting debug message both in the file and on the console because the console's handler does not have a level set. Not what you want. So you go back restore the root loggers level back to it's original ERROR level and set the file handler's level to DEBUG and the console handler's level to ERROR. Now you don't get any debug messages because the root logger is blocking all messages below the level of ERROR and doesn't invoke any handlers. The file handler attached to the root logger even though it's level is set to DEBUG never gets a chance to process the message. *IMPORTANT:* You have to set the logger's level to the minimum of all the attached handler's levels, otherwise the logger may block the message from ever reaching any handler. In this example the root logger's level must be set to DEBUG, the file handler's level to DEBUG, and the console handler's level set to ERROR. Now let's take a more real world example which is a bit more complicated. It's typical to assign loggers to every major class. In fact this is the design strategy of Java logging from which the Python logging is modeled. In a large complex application or library that means dozens or possibly hundreds of loggers. Now lets say you need to trace what is happening with one class. If you use the simplistic configuration outlined above you'll set the log level of the root logger and one of the handlers to debug. Now you're flooded with debug message from every logger in the system when all you wanted was the debug messages from just one class. How can you get fine grained control over which loggers emit debug messages? Here are some possibilities: (1) Set a filter. ................. When a message is propagated to a logger in the hierarchy first the loggers level is checked. If logger level passes then the logger iterates over every handler attached to the logger first checking the handler level. If the handler level check passes then the filters attached to the handler are run. Filters are passed the record (i.e. the message), it does not have access to either the logger or handler it's executing within. You can't just set the filter to only pass the records of the classes you want to debug because that would block other important info, warning, error and critical messages from other classes. The filter would have to know about the "global" log level which is in effect and also pass any messages at that level or higher. It's unfortunate the filter cannot know the level of the logger or handler it's executing inside of. Also logger filters only are applied to the logger they are attached to, i.e. the logger the message was generated on. They do not get applied to any ancestor loggers. That means you can't just set a filter on the root logger. You have to either set the filters on the handlers or on every logger created. The filter first checks the level of the message record. If it's greater than debug it passes it. For debug messages it checks the set of loggers which have debug messages enabled, if the message record was generated on one of those loggers it passes the record, otherwise it blocks it. The only question is whether you attach the filter to every logger or to a handful of handlers. The advantage of attaching the filter to every logger is efficiency, the time spent handling the message can be short circuited much sooner if the message is filtered earlier in the process. The advantage of attaching the filter to a handler is simplicity, you only have to do that when a handler is created, not every place in the code where a logger is created. (2) Conditionally set the level of each logger. ............................................... When loggers are created a check is performed to see if the logger is in the set of loggers for which debug information is desired, if so it's level is set to DEBUG, otherwise it's set to the global level. One has to recall there really isn't a single global level if you want some handlers to emit info and above, some handlers error and above, etc. In this case if the logger is not in the set of logger's emitting debug the logger level should be set to the next increment above debug level. A good question to ask would be why not just leave the logger's level unset if it's not in the set of loggers to be debugged? After all it will just inherit the root level right? There are two problems with that. 1) It wold actually inherit the level any ancestor logger and if an ancestor was set to debug you've effectively turned on debugging for all children of that ancestor logger. There are times you might want that behavior, where all your children inherit your level, but there are many cases where that's not the behavior you want. 2) A more pernicious problem exists. The logger your handlers are attached to MUST be set to debug level, otherwise your debug messages will never reach the handlers for output. Thus if you leave a loggers level unset and let it inherit it's effective level from an ancestor it might very well inherit the debug level from the root logger. That means you've completely negated your attempt to selectively set debug logging on specific loggers. Bottom line, you really have to set the level on every logger created if you want fine grained control. Approach 2 has some distinct performance advantages. First of all filters are not used, this avoids a whole processing step and extra filter function calls on every message. Secondly a logger level check is a simple integer compare which is very efficient. Thirdly the processing of a message can be short circuited very early in the processing pipeline, no ancestor loggers will be invoked and no handlers will be invoked. The downside is some added complexity at logger creation time. But this is easily mitigated by using a utility function or method to create the logger instead of just calling logger.getLogger(). Like every thing else in computer science which approach you take boils down to a series of trade offs, most around how your code is organized. You might find it easier to set a filter on just one or two handlers. It might be easier to modify the configuration during execution if the logic is centralized in just a filter function, but don't let that sway you too much because it's trivial to iterate over every logger and dynamically reset it's log level. Now at least you've got a basic understanding of how this stuff hangs together and what your options are. That's not insignificant, when I was first introduced to logging in Java and Python I found it bewildering difficult to get it do what I wanted. John Dennis <jdennis@redhat.com> ''' from __future__ import print_function import sys import os import pwd import logging import re import time import six LOGGING_DEFAULT_FORMAT = '%(levelname)s %(message)s' log_level_name_map = { 'notset' : logging.NOTSET, 'debug' : logging.DEBUG, 'info' : logging.INFO, 'warn' : logging.WARNING, 'warning' : logging.WARNING, 'error' : logging.ERROR, 'critical' : logging.CRITICAL } log_levels = (logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR, logging.CRITICAL) logger_method_names = ('debug', 'info', 'warning', 'error', 'exception', 'critical') def get_unique_levels(iterable): ''' Given a iterable of objects containing a logging level return a ordered list (min to max) of unique levels. :parameters: iterable Iterable yielding objects with a logging level attribute. :returns: Ordered list (min to max) of unique levels. ''' levels = set() for obj in iterable: level = getattr(obj, 'level', sys.maxsize) if level != logging.NOTSET: levels.add(level) levels = list(levels) levels.sort() return levels def get_minimum_level(iterable): ''' Given a iterable of objects containing a logging level return the minimum level. If no levels are defined return maxsize. set of unique levels. :parameters: iterable Iterable yielding objects with a logging level attribute. :returns: Ordered list (min to max) of unique levels. ''' min_level = sys.maxsize for obj in iterable: level = getattr(obj, 'level', sys.maxsize) if level != logging.NOTSET: if level < min_level: min_level = level return min_level def parse_log_level(level): ''' Given a log level either as a string or integer return a numeric logging level. The following case insensitive names are recognized:: * notset * debug * info * warn * warning * error * critical A string containing an integer is also recognized, for example ``"10"`` would map to ``logging.DEBUG`` The integer value must be the range [``logging.NOTSET``, ``logging.CRITICAL``] otherwise a value exception will be raised. :parameters: level basestring or integer, level value to convert :returns: integer level value ''' # Is it a string representation of an integer? # If so convert to an int. if isinstance(level, six.string_types): try: level = int(level) except ValueError: pass # If it's a string lookup it's name and map to logging level # otherwise validate the integer value is in range. if isinstance(level, six.string_types): result = log_level_name_map.get(level.lower()) #pylint: disable=E1103 if result is None: raise ValueError('unknown log level (%s)' % level) return result elif isinstance(level, int): if level < logging.NOTSET or level > logging.CRITICAL: raise ValueError('log level (%d) out of range' % level) return level else: raise TypeError('log level must be basestring or int, got (%s)' % type(level)) def logging_obj_str(obj): ''' Unfortunately the logging Logger and Handler classes do not have a custom __str__() function which converts the object into a human readable string representation. This function takes any object with a level attribute and outputs the objects name with it's associated level. If a name was never set for the object then it's repr is used instead. :parameters: obj Object with a logging level attribute :returns: string describing the object ''' name = getattr(obj, 'name', repr(obj)) text = '"%s" [level=%s]' % (name, logging.getLevelName(obj.level)) if isinstance(obj, logging.FileHandler): text += ' filename="%s"' % obj.baseFilename return text class LogManager(object): ''' This class wraps the functionality in the logging module to provide an easier to use API for logging while providing advanced features including a independent namespace. Each application or library wishing to have it's own logging namespace should instantiate exactly one instance of this class and use it to manage all it's logging. Traditionally (or simplistically) logging was set up with a single global root logger with output handlers bound to it. The global root logger (whose name is the empty string) was shared by all code in a loaded process. The only the global unamed root logger had a level set on it, all other loggers created inherited this global level. This can cause conflicts in more complex scenarios where loaded code wants to maintain it's own logging configuration independent of whomever loaded it's code. By using only a single logger level set on the global root logger it was not possible to have fine grained control over individual logger output. The pattern seen with this simplistic setup has been frequently copied despite being clumsy and awkward. The logging module has the tools available to support a more sophisitcated and useful model, but it requires an overarching framework to manage. This class provides such a framework. The features of this logging manager are: * Independent logging namespace. * Simplifed method to create handlers. * Simple setup for applications with command line args. * Sophisitcated handler configuration (e.g. file ownership & permissions) * Easy fine grained control of logger output (e.g. turning on debug for just 1 or 2 loggers) * Holistic management of the interrelationships between logging components. * Ability to dynamically adjust logging configuration in a running process. An independent namespace is established by creating a independent root logger for this manager (root_logger_name). This root logger is a direct child of the global unamed root logger. All loggers created by this manager will be descendants of this managers root logger. The managers root logger has it's propagate flag set to False which means all loggers and handlers created by this manager will be isolated in the global logging tree. Log level management: --------------------- Traditionally loggers inherited their logging level from the root logger. This was simple but made it impossible to independently control logging output from different loggers. If you set the root level to DEBUG you got DEBUG output from every logger in the system, often overwhelming in it's voluminous output. Many times you want to turn on debug for just one class (a common idom is to have one logger per class). To achieve the fine grained control you can either use filters or set a logging level on every logger (see the module documentation for the pros and cons). This manager sets a log level on every logger instead of using level inheritence because it's more efficient at run time. Global levels are supported via the verbose and debug flags setting every logger level to INFO and DEBUG respectively. Fine grained level control is provided via regular expression matching on logger names (see `configure()` for the details. For example if you want to set a debug level for the foo.bar logger set a regular expression to match it and bind it to the debug level. Note, the global verbose and debug flags always override the regular expression level configuration. Do not set these global flags if you want fine grained control. The manager maintains the minimum level for all loggers under it's control and the minimum level for all handlers under it's control. The reason it does this is because there is no point in generating debug messages on a logger if there is no handler defined which will output a debug message. Thus when the level is set on a logger it takes into consideration the set of handlers that logger can emit to. IMPORTANT: Because the manager maintains knowledge about all the loggers and handlers under it's control it is essential you use only the managers interface to modify a logger or handler and not set levels on the objects directly, otherwise the manger will not know to visit every object under it's control when a configuraiton changes (see '`LogManager.apply_configuration()`). Example Usage:: # Create a log managers for use by 'my_app' log_mgr = LogManager('my_app') # Create a handler to send error messages to stderr log_mgr.create_log_handlers([dict(stream=sys.stdout, level=logging.ERROR)]) # Create logger for a class class Foo(object): def __init__(self): self.log = log_mgr.get_logger(self) ''' def __init__(self, root_logger_name='', configure_state=None): ''' Create a new LogManager instance using root_logger_name as the parent of all loggers maintained by the manager. Only one log manger should be created for each logging namespace. :parameters: root_logger_name The name of the root logger. All loggers will be prefixed by this name. configure_state Used by clients of the log manager to track the configuration state, may be any object. :return: LogManager instance ''' self.loggers = {} # dict, key is logger name, value is logger object self.handlers = {} # dict, key is handler name, value is handler object self.configure_state = configure_state self.root_logger_name = root_logger_name self.default_level = 'error' self.debug = False self.verbose = False self.logger_regexps = [] self.root_logger = self.get_logger(self.root_logger_name) # Stop loggers and handlers from searching above our root self.root_logger.propagate = False def _get_default_level(self): return self._default_level def _set_default_level(self, value): level = parse_log_level(value) self._default_level = level self.apply_configuration() default_level = property(_get_default_level, _set_default_level, doc='see log_manager.parse_log_level()` for details on how the level can be specified during assignement.') def set_default_level(self, level, configure_state=None): ''' Reset the default logger level, updates all loggers. Note, the default_level may also be set by assigning to the default_level attribute but that does not update the configure_state, this method is provided as a convenience to simultaneously set the configure_state if so desired. :parameters: level The new default level for the log manager. See `log_manager.parse_log_level()` for details on how the level can be specified. configure_state If other than None update the log manger's configure_state variable to this object. Clients of the log manager can use configure_state to track the state of the log manager. ''' level = parse_log_level(level) self._default_level = level self.apply_configuration(configure_state) def __str__(self): ''' When str() is called on the LogManager output it's state. ''' text = '' text += 'root_logger_name: %s\n' % (self.root_logger_name) text += 'configure_state: %s\n' % (self.configure_state) text += 'default_level: %s\n' % (logging.getLevelName(self.default_level)) text += 'debug: %s\n' % (self.debug) text += 'verbose: %s\n' % (self.verbose) text += 'number of loggers: %d\n' % (len(self.loggers)) loggers = [logging_obj_str(x) for x in self.loggers.values()] loggers.sort() for logger in loggers: text += ' %s\n' % (logger) text += 'number of handlers: %d\n' % (len(self.handlers)) handlers = [logging_obj_str(x) for x in self.handlers.values()] handlers.sort() for handler in handlers: text += ' %s\n' % (handler) text += 'number of logger regexps: %d\n' % (len(self.logger_regexps)) for regexp, level in self.logger_regexps: text += ' "%s" => %s\n' % (regexp, logging.getLevelName(level)) return text def configure(self, config, configure_state=None): ''' The log manager is initialized from key,value pairs in the config dict. This may be called any time to modify the logging configuration at run time. The supported entries in the config dict are: default_level The default level applied to a logger when not indivdually configured. The verbose and debug config items override the default level. See `log_manager.parse_log_level()` for details on how the level can be specified. verbose Boolean, if True sets default_level to INFO. debug Boolean, if True sets default_level to DEBUG. logger_regexps List of (regexp, level) tuples. This is a an ordered list regular expressions used to match against a logger name to configure the logger's level. The first regexp in the sequence which matches the logger name will use the the level bound to that regexp to set the logger's level. If no regexp matches the logger name then the logger will be assigned the default_level. The regular expression comparision is performed with the re.search() function which means the match can be located anywhere in the name string (as opposed to the start of the string). Do not forget to escape regular expression metacharacters when appropriate. For example dot ('.') is used to seperate loggers in a logging hierarchy path (e.g. a.b.c) Examples:: # To match exactly the logger a.b.c and set it to DEBUG: logger_regexps = [(r'^a\.b\.c$', 'debug')] # To match any child of a.b and set it to INFO: logger_regexps = [(r'^a\.b\..*', 'info')] # To match any leaf logger with the name c and set it to level 5: logger_regexps = [(r'\.c$', 5)] handlers List of handler config dicts or (config, logger) tuples. See `create_log_handlers()` for details of a hanlder config. The simple form where handlers is a list of dicts each handler is bound to the log mangers root logger (see `create_log_handlers()` optional ``logger`` parameter). If you want to bind each handler to a specific logger other then root handler then group the handler config with a logger in a (config, logger) tuple. The logger may be either a logger name or a logger instance. The following are all valid methods of passing handler configuration.:: # List of 2 config dicts; both handlers bound to root logger [{}, {}] # List of 2 tuples; first handler bound to logger_name1 # by name, second bound to logger2 by object. [({}, 'logger_name1'), ({}, logger2'] # List of 1 dict, 1 tuple; first bound to root logger, # second bound to logger_name by name [{}, ({}, 'logger_name'] :parameters: config Dict of <key,value> pairs describing the configuration. configure_state If other than None update the log manger's configure_state variable to this object. Clients of the log manager can use configure_state to track the state of the log manager. ''' for attr in ('debug', 'verbose', 'logger_regexps'): value = config.get(attr) if value is not None: setattr(self, attr, value) attr = 'default_level' value = config.get(attr) if value is not None: try: level = parse_log_level(value) except Exception as e: raise ValueError("could not set %s (%s)" % (attr, e)) setattr(self, attr, level) attr = 'handlers' handlers = config.get(attr) if handlers is not None: for item in handlers: logger = self.root_logger config = None if isinstance(item, dict): config = item elif isinstance(item, tuple): if len(item) != 2: raise ValueError('handler tuple must have exactly 2 items, got "%s"' % item) config = item[0] logger = item[1] else: raise TypeError('expected dict or tuple for handler item, got "%s", handlers=%s' % \ type(item), value) if not isinstance(config, dict): raise TypeError('expected dict for handler config, got "%s"', type(config)) if isinstance(logger, six.string_types): logger = self.get_logger(logger) else: if not isinstance(logger, logging.Logger): raise TypeError('expected logger name or logger object in %s' % item) self.create_log_handlers([config], logger, configure_state) if self.verbose: self.default_level = logging.INFO if self.debug: self.default_level = logging.DEBUG self.apply_configuration(configure_state) def create_log_handlers(self, configs, logger=None, configure_state=None): ''' Create new handlers and attach them to a logger (log mangers root logger by default). *Note, you may also pass the handler configs to `LogManager.configure()`.* configs is an iterable yielding a dict. Each dict configures a handler. Currently two types of handlers are supported: * stream * file Which type of handler is created is determined by the presence of the ``stream`` or ``filename`` in the dict. Configuration keys: =================== Handler type keys: ------------------ Exactly of the following must present in the config dict: stream Use the specified stream to initialize the StreamHandler. filename Specifies that a FileHandler be created, using the specified filename. log_handler Specifies a custom logging.Handler to use Common keys: ------------ name Set the name of the handler. This is optional but can be useful when examining the logging configuration. For files defaults to ``'file:absolute_path'`` and for streams it defaults to ``'stream:stream_name'`` format Use the specified format string for the handler. time_zone_converter Log record timestamps are seconds since the epoch in the UTC time zone stored as floating point values. When the formatter inserts a timestamp via the %(asctime)s format substitution it calls a time zone converter on the timestamp which returns a time.struct_time value to pass to the time.strftime function along with the datefmt format conversion string. The time module provides two functions with this signature, time.localtime and time.gmtime which performs a conversion to local time and UTC respectively. time.localtime is the default converter. Setting the time zone converter to time.gmtime is appropriate for date/time strings in UTC. The time_zone_converter attribute may be any function with the correct signature. Or as a convenience you may also pass a string which will select either the time.localtime or the time.gmtime converter. The case insenstive string mappings are:: 'local' => time.localtime 'localtime' => time.localtime 'gmt' => time.gmtime 'gmtime' => time.gmtime 'utc' => time.gmtime datefmt Use the specified time.strftime date/time format when formatting a timestamp via the %(asctime)s format substitution. The timestamp is first converted using the time_zone_converter to either local or UTC level Set the handler logger level to the specified level. May be one of the following strings: 'debug', 'info', 'warn', 'warning', 'error', 'critical' or any of the logging level constants. Thus level='debug' is equivalent to level=logging.DEBUG. Defaults to self.default_level. File handler keys: ------------------ filemode Specifies the mode to open the file. Defaults to 'a' for append, use 'w' for write. permission Set the permission bits on the file (i.e. chmod). Must be a valid integer (e.g. 0660 for rw-rw----) user Set the user owning the file. May be either a numeric uid or a basestring with a user name in the passwd file. group Set the group associated with the file, May be either a numeric gid or a basestring with a group name in the groups file. Examples: --------- The following shows how to set two handlers, one for a file (ipa.log) at the debug log level and a second handler set to stdout (e.g. console) at the info log level. (One handler sets it level with a simple name, the other with a logging constant just to illustrate the flexibility) :: # Get a root logger log_mgr = LogManger('my_app') # Create the handlers log_mgr.create_log_handlers([dict(filename='my_app.log', level='info', user='root', group='root', permission=0600, time_zone_converter='utc', datefmt='%Y-%m-%dT%H:%M:%SZ', # ISO 8601 format='<%(levelname)s> [%(asctime)s] module=%(name)s "%(message)s"'), dict(stream=sys.stdout, level=logging.ERROR, format='%(levelname)s: %(message)s')]) # Create a logger for my_app.foo.bar foo_bar_log = log_mgr.get_logger('foo.bar') root_logger.info("Ready to process requests") foo_bar_log.error("something went boom") In the file my_app.log you would see:: <INFO> [2011-10-26T01:39:00Z] module=my_app "Ready to process requests" <ERROR> [2011-10-26T01:39:00Z] module=may_app.foo.bar "something went boom" On the console you would see:: ERROR: something went boom :parameters: configs Sequence of dicts (any iterable yielding a dict). Each dict creates one handler and contains the configuration parameters used to create that handler. logger If unspecified the handlers will be attached to the LogManager.root_logger, otherwise the handlers will be attached to the specified logger. configure_state If other than None update the log manger's configure_state variable to this object. Clients of the log manager can use configure_state to track the state of the log manager. :return: The list of created handers. ''' if logger is None: logger = self.root_logger handlers = [] # Iterate over handler configurations. for cfg in configs: # Type of handler? filename = cfg.get('filename') stream = cfg.get("stream") log_handler = cfg.get("log_handler") if filename: if "stream" in cfg: raise ValueError("both filename and stream are specified, must be one or the other, config: %s" % cfg) path = os.path.abspath(filename) filemode = cfg.get('filemode', 'a') handler = logging.FileHandler(path, filemode) # Set the handler name name = cfg.get("name") if name is None: name = 'file:%s' % (path) handler.name = name # Path should now exist, set ownership and permissions if requested. # Set uid, gid (e.g. chmod) uid = gid = None user = cfg.get('user') group = cfg.get('group') if user is not None: if isinstance(user, six.string_types): pw = pwd.getpwnam(user) uid = pw.pw_uid elif isinstance(user, int): uid = user else: raise TypeError("user (%s) is not int or basestring" % user) if group is not None: if isinstance(group, six.string_types): pw = pwd.getpwnam(group) gid = pw.pw_gid elif isinstance(group, int): gid = group else: raise TypeError("group (%s) is not int or basestring" % group) if uid is not None or gid is not None: if uid is None: uid = -1 if gid is None: gid = -1 os.chown(path, uid, gid) # Set file permissions (e.g. mode) permission = cfg.get('permission') if permission is not None: os.chmod(path, permission) elif stream: handler = logging.StreamHandler(stream) # Set the handler name name = cfg.get("name") if name is None: name = 'stream:%s' % (stream) handler.name = name elif log_handler: handler = log_handler else: raise ValueError( "neither file nor stream nor log_handler specified in " "config: %s" % cfg) # Add the handler handlers.append(handler) # Configure message formatting on the handler format = cfg.get("format", LOGGING_DEFAULT_FORMAT) datefmt = cfg.get("datefmt", None) formatter = logging.Formatter(format, datefmt) time_zone_converter = cfg.get('time_zone_converter', time.localtime) if isinstance(time_zone_converter, six.string_types): converter = {'local' : time.localtime, 'localtime' : time.localtime, 'gmt' : time.gmtime, 'gmtime' : time.gmtime, 'utc' : time.gmtime}.get(time_zone_converter.lower()) if converter is None: raise ValueError("invalid time_zone_converter name (%s)" % \ time_zone_converter) elif callable(time_zone_converter): converter = time_zone_converter else: raise ValueError("time_zone_converter must be basestring or callable, not %s" % \ type(time_zone_converter)) formatter.converter = converter handler.setFormatter(formatter) # Set the logging level level = cfg.get('level') if level is not None: try: level = parse_log_level(level) except Exception as e: print('could not set handler log level "%s" (%s)' % (level, e), file=sys.stderr) level = None if level is None: level = self.default_level handler.setLevel(level) for handler in handlers: if handler.name in self.handlers: raise ValueError('handler "%s" already exists' % handler.name) logger.addHandler(handler) self.handlers[handler.name] = handler self.apply_configuration(configure_state) return handlers def get_handler(self, handler_name): ''' Given a handler name return the handler object associated with it. :parameters: handler_name Name of the handler to look-up. :returns: The handler object associated with the handler name. ''' handler = self.handlers.get(handler_name) if handler is None: raise KeyError('handler "%s" is not defined' % handler_name) return handler def set_handler_level(self, handler_name, level, configure_state=None): ''' Given a handler name, set the handler's level, return previous level. :parameters: handler_name Name of the handler to look-up. level The new level for the handler. See `log_manager.parse_log_level()` for details on how the level can be specified. configure_state If other than None update the log manger's configure_state variable to this object. Clients of the log manager can use configure_state to track the state of the log manager. :returns: The handler's previous level ''' handler = self.get_handler(handler_name) level = parse_log_level(level) prev_level = handler.level handler.setLevel(level) self.apply_configuration(configure_state) return prev_level def get_loggers_with_handler(self, handler): ''' Given a handler return a list of loggers that hander is bound to. :parameters: handler The name of a handler or a handler object. :returns: List of loggers with the handler is bound to. ''' if isinstance(handler, six.string_types): handler = self.get_handler(handler) elif isinstance(handler, logging.Handler): if not handler in self.handlers.values(): raise ValueError('handler "%s" is not managed by this log manager' % \ logging_obj_str(handler)) else: raise TypeError('handler must be basestring or Handler object, got %s' % type(handler)) loggers = [] for logger in self.loggers.values(): if handler in logger.handlers: loggers.append(logger) return loggers def remove_handler(self, handler, logger=None, configure_state=None): ''' Remove the named handler. If logger is unspecified the handler will be removed from all managed loggers, otherwise it will be removed from only the specified logger. :parameters: handler The name of the handler to be removed or the handler object. logger If unspecified the handler is removed from all loggers, otherwise the handler is removed from only this logger. configure_state If other than None update the log manger's configure_state variable to this object. Clients of the log manager can use configure_state to track the state of the log manager. ''' if isinstance(handler, six.string_types): handler = self.get_handler(handler) elif not isinstance(handler, logging.Handler): raise TypeError('handler must be basestring or Handler object, got %s' % type(handler)) handler_name = handler.name if handler_name is None: raise ValueError('handler "%s" does not have a name' % logging_obj_str(handler)) loggers = self.get_loggers_with_handler(handler) if logger is None: for logger in loggers: logger.removeHandler(handler) del self.handlers[handler_name] else: if not logger in loggers: raise ValueError('handler "%s" is not bound to logger "%s"' % \ (handler_name, logging_obj_str(logger))) logger.removeHandler(handler) if len(loggers) == 1: del self.handlers[handler_name] self.apply_configuration(configure_state) def apply_configuration(self, configure_state=None): ''' Using the log manager's internal configuration state apply the configuration to all the objects managed by the log manager. :parameters: configure_state If other than None update the log manger's configure_state variable to this object. Clients of the log manager can use configure_state to track the state of the log manager. ''' if configure_state is not None: self.configure_state = configure_state for logger in self.loggers.values(): self._set_configured_logger_level(logger) def get_configured_logger_level(self, name): ''' Given a logger name return it's level as defined by the `LogManager` configuration. :parameters: name logger name :returns: log level ''' level = self.default_level for regexp, config_level in self.logger_regexps: if re.search(regexp, name): level = config_level break level = parse_log_level(level) return level def get_logger_handlers(self, logger): ''' Return the set of unique handlers visible to this logger. :parameters: logger The logger whose visible and enabled handlers will be returned. :return: Set of handlers ''' handlers = set() while logger: for handler in logger.handlers: handlers.add(handler) if logger.propagate: logger = logger.parent else: logger = None return handlers def get_minimum_handler_level_for_logger(self, logger): ''' Return the minimum handler level of all the handlers the logger is exposed to. :parameters: logger The logger whose handlers will be examined. :return: The minimum of all the handler's levels. If no handlers are defined sys.maxsize will be returned. ''' handlers = self.get_logger_handlers(logger) min_level = get_minimum_level(handlers) return min_level def _set_configured_logger_level(self, logger): ''' Based on the current configuration maintained by the log manager set this logger's level. If the level specified for this logger by the configuration is less than the minimum level supported by the output handlers the logger is exposed to then adjust the logger's level higher to the minimum handler level. This is a performance optimization, no point in emitting a log message if no handlers will ever output it. :parameters: logger The logger whose level is being configured. :return: The level actually set on the logger. ''' level = self.get_configured_logger_level(logger.name) minimum_handler_level = self.get_minimum_handler_level_for_logger(logger) if level < minimum_handler_level: level = minimum_handler_level logger.setLevel(level) return level def get_logger(self, who, bind_logger_names=False): ''' Return the logger for an object or a name. If the logger already exists return the existing instance otherwise create the logger. The who parameter may be either a name or an object. Loggers are identified by a name but because loggers are usually bound to a class this method is optimized to handle that case. If who is an object: * The name object's module name (dot seperated) and the object's class name. * Optionally the logging output methods can be bound to the object if bind_logger_names is True. Otherwise if who is a basestring it is used as the logger name. In all instances the root_logger_name is prefixed to every logger created by the manager. :parameters: who If a basestring then use this as the logger name, prefixed with the root_logger_name. Otherwise who is treated as a class instance. The logger name is formed by prepending the root_logger_name to the module name and then appending the class name. All name components are dot seperated. Thus if the root_logger_name is 'my_app', the class is ParseFileConfig living in the config.parsers module the logger name will be: ``my_app.config.parsers.ParseFileConfig``. bind_logger_names If true the class instance will have the following bound to it: ``log``, ``debug()``, ``info()``, ``warning()``, ``error()``, ``exception()``, ``critical()``. Where log is the logger object and the others are the loggers output methods. This is a convenience which allows you emit logging messages directly, for example:: self.debug('%d names defined', self.num_names). :return: The logger matching the name indicated by who. If the logger pre-existed return that instance otherwise create the named logger return it. ''' is_object = False if isinstance(who, six.string_types): obj_name = who else: is_object = True obj_name = '%s.%s' % (who.__module__, who.__class__.__name__) if obj_name == self.root_logger_name: logger_name = obj_name else: logger_name = self.root_logger_name + '.' + obj_name # If logger not in our cache then create and initialize the logger. logger = self.loggers.get(logger_name) if logger is None: logger = logging.getLogger(logger_name) self.loggers[logger_name] = logger self._set_configured_logger_level(logger) if bind_logger_names and is_object and getattr(who, '__log_manager', None) is None: setattr(who, '__log_manager', self) method = 'log' if hasattr(who, method): raise ValueError('%s is already bound to %s' % (method, repr(who))) setattr(who, method, logger) for method in logger_method_names: if hasattr(who, method): raise ValueError('%s is already bound to %s' % (method, repr(who))) setattr(who, method, getattr(logger, method)) return logger
""" Created by Emille Ishida in May, 2015. Class to implement calculations on data matrix. """ import os import sys import matplotlib.pylab as plt import numpy as np from multiprocessing import Pool from snclass.treat_lc import LC from snclass.util import read_user_input, read_snana_lc, translate_snid from snclass.functions import core_cross_val, screen class DataMatrix(object): """ Data matrix class. Methods: - build: Build data matrix according to user input file specifications. - reduce_dimension: Perform dimensionality reduction. - cross_val: Perform cross-validation. Attributes: - user_choices: dict, user input choices - snid: vector, list of objects identifiers - datam: array, data matrix for training - redshift: vector, redshift for training data - sntype: vector, classification of training data - low_dim_matrix: array, data matrix in KernelPC space - transf_test: function, project argument into KernelPC space - final: vector, optimize parameter values """ def __init__(self, input_file=None): """ Read user input file. input: input_file -> str name of user input file """ self.datam = None self.snid = [] self.redshift = None self.sntype = None self.low_dim_matrix = None self.transf_test = None self.final = None self.test_projection = [] if input_file is not None: self.user_choices = read_user_input(input_file) def check_file(self, filename, epoch=True, ref_filter=None): """ Construct one line of the data matrix. input: filename, str file of raw data for 1 supernova epoch, bool - optional If true, check if SN satisfies epoch cuts Default is True ref_filter, str - optional Reference filter for peak MJD calculation Default is None """ screen('Fitting ' + filename, self.user_choices) # translate identifier self.user_choices['path_to_lc'] = [translate_snid(filename, self.user_choices['photon_flag'][0])[0]] # read light curve raw data raw = read_snana_lc(self.user_choices) # initiate light curve object lc_obj = LC(raw, self.user_choices) # load GP fit lc_obj.load_fit_GP(self.user_choices['samples_dir'][0] + filename) # normalize lc_obj.normalize(ref_filter=ref_filter) # shift to peak mjd lc_obj.mjd_shift() if epoch: # check epoch requirements lc_obj.check_epoch() else: lc_obj.epoch_cuts = True if lc_obj.epoch_cuts: # build data matrix lines lc_obj.build_steps() # store obj_line = [] for fil in self.user_choices['filters']: for item in lc_obj.flux_for_matrix[fil]: obj_line.append(item) rflag = self.user_choices['redshift_flag'][0] redshift = raw[rflag][0] obj_class = raw[self.user_choices['type_flag'][0]][0] self.snid.append(raw['SNID:'][0]) return obj_line, redshift, obj_class else: screen('... Failed to pass epoch cuts!', self.user_choices) screen('\n', self.user_choices) return None def store_training(self, file_out): """ Store complete training matrix. input: file_out, str output file name """ # write to file if file_out is not None: op1 = open(file_out, 'w') op1.write('SNID type z LC...\n') for i in xrange(len(self.datam)): op1.write(str(self.snid[i]) + ' ' + str(self.sntype[i]) + ' ' + str(self.redshift[i]) + ' ') for j in xrange(len(self.datam[i])): op1.write(str(self.datam[i][j]) + ' ') op1.write('\n') op1.close() def build(self, file_out=None, check_epoch=True, ref_filter=None): """ Build data matrix according to user input file specifications. input: file_out -> str, optional file to store data matrix (str). Default is None check_epoch -> bool, optional If True check if SN satisfies epoch cuts Default is True ref_filter -> str, optional Reference filter for MJD calculation Default is None """ # list all files in sample directory file_list = os.listdir(self.user_choices['samples_dir'][0]) datam = [] redshift = [] sntype = [] for obj in file_list: if 'mean' in obj: sn_char = self.check_file(obj, epoch=check_epoch, ref_filter=ref_filter) if sn_char is not None: datam.append(sn_char[0]) redshift.append(sn_char[1]) sntype.append(sn_char[2]) self.datam = np.array(datam) self.redshift = np.array(redshift) self.sntype = np.array(sntype) # store results self.store_training(file_out) def reduce_dimension(self): """Perform dimensionality reduction with user defined function.""" # define dimensionality reduction function func = self.user_choices['dim_reduction_func'] # reduce dimensionality self.low_dim_matrix = func(self.datam, self.user_choices) # define transformation function self.transf_test = func(self.datam, self.user_choices, transform=True) def cross_val(self): """Optimize the hyperparameters for RBF kernel and ncomp.""" # correct type parameters if necessary types_func = self.user_choices['transform_types_func'] if types_func is not None: self.sntype = types_func(self.sntype, self.user_choices['Ia_flag'][0]) # initialize parameters data = self.datam types = self.sntype choices = self.user_choices nparticles = self.user_choices['n_cross_val_particles'] parameters = [] for i in xrange(nparticles): pars = {} pars['data'] = data pars['types'] = types pars['user_choices'] = choices parameters.append(pars) if int(self.user_choices['n_proc'][0]) > 0: cv_func = self.user_choices['cross_validation_func'] pool = Pool(processes=int(self.user_choices['n_proc'][0])) my_pool = pool.map_async(cv_func, parameters) try: results = my_pool.get(0xFFFF) except KeyboardInterrupt: print 'Interruputed by the user!' sys.exit() pool.close() pool.join() results = np.array(results) else: number = self.user_choices['n_cross_val_particles'] results = np.array([core_cross_val(pars) for pars in parameters]) flist = list(results[:,len(results[0])-1]) max_success = max(flist) indx_max = flist.index(max_success) self.final = {} for i in xrange(len(self.user_choices['cross_val_par'])): par_list = self.user_choices['cross_val_par'] self.final[par_list[i]] = results[indx_max][i] def final_configuration(self): """Determine final configuraton based on cross-validation results.""" #update optimized hyper-parameters for par in self.user_choices['cross_val_par']: indx = self.user_choices['cross_val_par'].index(par) self.user_choices[par] = self.final[par] #update low dimensional matrix self.reduce_dimension() def plot(self, pcs, file_out, show=False, test=None): """ Plot 2-dimensional scatter of data matrix in kPCA space. input: pcs, vector of int kernel PCs to be used as horizontal and vertical axis file_out, str file name to store final plot show, bool, optional if True show plot in screen Default is False test, dict, optional keywords: data, type if not None plot the projection of 1 photometric object Default is None """ #define vectors to plot xdata = self.low_dim_matrix[:,pcs[0]] ydata = self.low_dim_matrix[:,pcs[1]] if '0' in self.sntype: snIa = self.sntype == '0' nonIa = self.sntype != '0' else: snIa = self.sntype == 'Ia' snIbc = self.sntype == 'Ibc' snII = self.sntype == 'II' plt.figure(figsize=(10,10)) if '0' in self.sntype: plt.scatter(xdata[nonIa], ydata[nonIa], color='purple', marker='s', label='spec non-Ia') plt.scatter(xdata[snIa], ydata[snIa], color='blue', marker='o', label='spec Ia') else: plt.scatter(xdata[snII], ydata[snII], color='purple', marker='s', label='spec II') plt.scatter(xdata[snIbc], ydata[snIbc], color='green', marker='^', s=30, label='spec Ibc') plt.scatter(xdata[snIa], ydata[snIa], color='blue', marker='o', label='spec Ia') if test is not None: if len(test.samples_for_matrix) > 0: plt.title('prob_Ia = ' + str(round(test['prob_Ia'], 2))) if test.raw['SIM_NON1a:'][0] == '0': sntype = 'Ia' else: sntype = 'nonIa' plt.scatter([test.test_proj[0][pcs[0]]], [test.test_proj[0][pcs[1]]], marker='*', color='red', s=75, label='photo ' + sntype) plt.xlabel('kPC' + str(pcs[0] + 1), fontsize=14) plt.ylabel('kPC' + str(pcs[1] + 1), fontsize=14) plt.legend(fontsize=12) if show: plt.show() if file_out is not None: plt.savefig(file_out) plt.close() def main(): """Print documentation.""" print __doc__ if __name__ == '__main__': main()
__author__ = 'harsha' class ForceReply(object): def __init__(self, force_reply, selective): self.force_reply = force_reply self.selective = selective def get_force_reply(self): return self.force_reply def get_selective(self): return self.selective def __str__(self): return str(self.__dict__)
import functools @functools.total_ordering class Student: def __init__(self, firstname, lastname): #姓和名 self.firstname = firstname self.lastname = lastname def __eq__(self, other): #判断姓名是否一致 return ((self.lastname.lower(), self.firstname.lower()) == (other.lastname.lower(), other.firstname.lower())) def __lt__(self, other): #self姓名<other姓名 return ((self.lastname.lower(), self.firstname.lower()) < (other.lastname.lower(), other.firstname.lower())) if __name__ == '__main__': s1 = Student('Mary','Clinton') s2 = Student('Mary','Clinton') s3 = Student('Charlie','Clinton') print(s1==s2) print(s1>s3)
from flask import Flask, render_template, request, redirect, url_for, flash, jsonify, Response from celery import Celery from werkzeug.utils import secure_filename from VideoPlayer import VideoPlayer from subprocess import Popen import os app = Flask(__name__) local = False if local: UPLOAD_FOLDER = '/home/dabo02/Desktop/Projects/Side_Projects/Upwork_Tom_VideoShowroom/static/video/' else: UPLOAD_FOLDER='/home/pi/Desktop/Upwork_Tom_VideoShowroom/static/video/' import RPi.GPIO as GPIO GPIO.setmode(GPIO.BCM) GPIO.setup(23, GPIO.IN, pull_up_down=GPIO.PUD_DOWN) GPIO.setup(24, GPIO.OUT) app.config['CELERY_BROKER_URL'] = 'amqp://' app.config['CELERY_RESULT_BACKEND'] = 'amqp://' app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER celery = Celery(app.name, broker=app.config['CELERY_BROKER_URL']) celery.conf.update(app.config) ALLOWED_EXTENSIONS = set(['mp3', 'mp4']) light_state = False exit_flag = False current_video = None preview_video = '' def allowed_file(filename): return '.' in filename and \ filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS def check_for_current(): global current_video if not current_video: list_of_videos = os.listdir(UPLOAD_FOLDER) current_video = list_of_videos[0] @celery.task def main_routine(): vp = VideoPlayer() while True: mag_switch = GPIO.input(23) if mag_switch: if not vp.video_is_playing: GPIO.output(24, 0) check_for_current() global current_video vp.set_video(UPLOAD_FOLDER + current_video) vp.play_video() else: GPIO.output(24, 1) vp.stop_video() @app.route('/') def dashboard(): video_list = os.listdir(UPLOAD_FOLDER) video_info = {} videos = [] global current_video global preview_video global light_state preview = '' for v in video_list: if current_video: if current_video in v: current = True else: current = False else: current = False if preview_video: if preview_video in v: preview = v name = v.rsplit('.', 1)[0] video_info = {'name': name, 'id': v, 'current': current} videos.append(video_info) return render_template('index.html', videos=videos, preview=preview, light_state=light_state) @app.route('/upload_video', methods=['POST']) def upload_video(): if 'video' not in request.files: flash('No file part') return redirect(url_for('dashboard')) file = request.files['video'] if file.filename == '': flash('No selected file') return redirect(url_for('dashboard')) if file and allowed_file(file.filename): filename = secure_filename(file.filename) filepath = os.path.join(app.config['UPLOAD_FOLDER'], filename) file.save(filepath) return redirect(url_for('dashboard')) @app.route('/remove_video/<id>', methods=['GET']) def remove_video(id): video_to_remove = UPLOAD_FOLDER + '/' + id os.remove(os.path.join(app.config['UPLOAD_FOLDER'], video_to_remove)) return redirect(url_for('dashboard')) @app.route('/update_video/<id>', methods=['GET']) def change_current_video(id): new_video = id global current_video current_video = new_video return redirect(url_for('dashboard')) @app.route('/preview_video/<id>', methods=['GET']) def preview_current_video(id): global preview_video preview_video = id return redirect(url_for('dashboard')) @app.route('/light_state/<state>', methods=['GET']) def light_state(state): global light_state if state in 'True': GPIO.output(24, 1) light_state = True return redirect(url_for('dashboard')) GPIO.output(24, 0) light_state = False return redirect(url_for('dashboard')) @app.route('/start') def start_loop(): task = main_routine.apply_async() return redirect(url_for('dashboard')) @app.route('/reboot') def reboot_pi(): GPIO.cleanup() Popen('reboot', shell=True) return '<div><h1>Rebooting Pi.....</h1></div>' @app.route('/shutdown') def shutdown_pi(): GPIO.cleanup() Popen('shutdown -h now', shell=True) return '<div><h1>Shutting Down Pi.....</h1></div>' if __name__ == '__main__': if local: app.run(host='localhost', port=3000) else: app.run(host='0.0.0.0', port=3500)
import todsynth import os import numpy import json import pandas class Calibrator( object ): ''' A todsynth.calibrator object is a container that stores coefficients that transform RAW dac units to physical units for a given TOD. ''' # Calibrator description. #000000000000000000000000000000000000000000000000000000000000000000000000 name = "" description = "" calType = "" # Information stored in the form of a dictionary. Careful not to abuse # of this in the sense of using it to process data! info = {} #000000000000000000000000000000000000000000000000000000000000000000000000 # Calibration coefficients coeffs = numpy.empty(0) # Detector index to Unique Identifier array __uid = numpy.empty(0) def __init__( self ): ''' self.name = name self.description = descrp self.calType = calType ''' def setCoeffs( self, c , uid=None ): ''' Set calibrator coefficients to c. ''' # Perform numpy.copy() to avoid cross referencing stuff self.__coeffs = numpy.copy( c ) if uid is not None: self.__uid = numpy.copy(uid) self.coeffs = self.coeffs[ self.__uid ] else: self.__uid = numpy.arange( len( self.coeffs ) ) def getCoeffs( self ): ''' Get a *copy* of the coefficients array. ''' return numpy.copy( self.coeffs ) def updateInfo( self, prop, value ): ''' Update calibrator info with a pair of prop : value ''' self.info.update( { 'prop' : value } ) def storeInPath( self , outPath ): ''' Stores the calibrator in JSON format at the specified path. ''' # Serialize this object data = { 'coefficients' : self.__coeffs, 'uid' : self.__uid } # Create PANDAS DataFrame out data df = pandas.DataFrame( data ) # Save DataFrame to HDF5 format df.to_csv( os.path.join( outPath, "%s.%s.cal" % (self.name,self.calType) ), index=False, sep=' ', header=True ) @classmethod def readFromPath( cls, systemPath ): ''' ''' self = cls() name,caltype,_ = os.path.basename( systemPath ).split('.') self.name = name self.calType = caltype self.description = '' # Load file calDF = pandas.read_csv( systemPath, header=0, names=['coefficients', 'uid'], delimiter=' ' ) self.setCoeffs( calDF['coefficients'], uid = calDF['uid'] ) return self
"""Generic base class for cli hammer commands.""" import logging from robottelo import ssh from robottelo.cli import hammer from robottelo.config import conf class CLIError(Exception): """Indicates that a CLI command could not be run.""" class CLIReturnCodeError(Exception): """Indicates that a CLI command has finished with return code, different from zero. :param return_code: CLI command return code :param stderr: contents of the ``stderr`` :param msg: explanation of the error """ def __init__(self, return_code, stderr, msg): self.return_code = return_code self.stderr = stderr self.msg = msg def __str__(self): return self.msg class Base(object): """ @param command_base: base command of hammer. Output of recent `hammer --help`:: activation-key Manipulate activation keys. architecture Manipulate architectures. auth Foreman connection login/logout. auth-source Manipulate auth sources. capsule Manipulate capsule compute-resource Manipulate compute resources. content-host Manipulate content hosts on the server content-view Manipulate content views. docker-image Manipulate docker images domain Manipulate domains. environment Manipulate environments. erratum Manipulate errata fact Search facts. filter Manage permission filters. global-parameter Manipulate global parameters. gpg Manipulate GPG Key actions on the server host Manipulate hosts. host-collection Manipulate host collections hostgroup Manipulate hostgroups. import Import data exported from a Red Hat Sat.. lifecycle-environment Manipulate lifecycle_environments location Manipulate locations. medium Manipulate installation media. model Manipulate hardware models. organization Manipulate organizations os Manipulate operating system. package Manipulate packages. package-group Manipulate package groups partition-table Manipulate partition tables. ping Get the status of the server product Manipulate products. proxy Manipulate smart proxies. puppet-class Search puppet modules. puppet-module View Puppet Module details. report Browse and read reports. repository Manipulate repositories repository-set Manipulate repository sets on the server role Manage user roles. sc-param Manipulate smart class parameters. shell Interactive shell subnet Manipulate subnets. subscription Manipulate subscriptions. sync-plan Manipulate sync plans task Tasks related actions. template Manipulate config templates. user Manipulate users. user-group Manage user groups. @since: 27.Nov.2013 """ command_base = None # each inherited instance should define this command_sub = None # specific to instance, like: create, update, etc command_requires_org = False # True when command requires organization-id logger = logging.getLogger('robottelo') @classmethod def _handle_response(cls, response, ignore_stderr=None): """Verify ``return_code`` of the CLI command. Check for a non-zero return code or any stderr contents. :param response: a ``SSHCommandResult`` object, returned by :mod:`robottelo.ssh.command`. :param ignore_stderr: indicates whether to throw a warning in logs if ``stderr`` is not empty. :returns: contents of ``stdout``. :raises robottelo.cli.base.CLIReturnCodeError: If return code is different from zero. """ if response.return_code != 0: raise CLIReturnCodeError( response.return_code, response.stderr, u'Command "{0} {1}" finished with return_code {2}\n' 'stderr contains following message:\n{3}' .format( cls.command_base, cls.command_sub, response.return_code, response.stderr, ) ) if len(response.stderr) != 0 and not ignore_stderr: cls.logger.warning( u'stderr contains following message:\n{0}' .format(response.stderr) ) return response.stdout @classmethod def add_operating_system(cls, options=None): """ Adds OS to record. """ cls.command_sub = 'add-operatingsystem' result = cls.execute(cls._construct_command(options)) return result @classmethod def create(cls, options=None): """ Creates a new record using the arguments passed via dictionary. """ cls.command_sub = 'create' if options is None: options = {} result = cls.execute( cls._construct_command(options), output_format='csv') # Extract new object ID if it was successfully created if len(result) > 0 and 'id' in result[0]: obj_id = result[0]['id'] # Fetch new object # Some Katello obj require the organization-id for subcommands info_options = {u'id': obj_id} if cls.command_requires_org: if 'organization-id' not in options: raise CLIError( 'organization-id option is required for {0}.create' .format(cls.__name__) ) info_options[u'organization-id'] = options[u'organization-id'] new_obj = cls.info(info_options) # stdout should be a dictionary containing the object if len(new_obj) > 0: result = new_obj return result @classmethod def delete(cls, options=None): """Deletes existing record.""" cls.command_sub = 'delete' return cls.execute( cls._construct_command(options), ignore_stderr=True, ) @classmethod def delete_parameter(cls, options=None): """ Deletes parameter from record. """ cls.command_sub = 'delete-parameter' result = cls.execute(cls._construct_command(options)) return result @classmethod def dump(cls, options=None): """ Displays the content for existing partition table. """ cls.command_sub = 'dump' result = cls.execute(cls._construct_command(options)) return result @classmethod def _get_username_password(cls, username=None, password=None): """Lookup for the username and password for cli command in following order: 1. ``user`` or ``password`` parameters 2. ``foreman_admin_username`` or ``foreman_admin_password`` attributes 3. foreman.admin.username or foreman.admin.password configuration :return: A tuple with the username and password found :rtype: tuple """ if username is None: try: username = getattr(cls, 'foreman_admin_username') except AttributeError: username = conf.properties['foreman.admin.username'] if password is None: try: password = getattr(cls, 'foreman_admin_password') except AttributeError: password = conf.properties['foreman.admin.password'] return (username, password) @classmethod def execute(cls, command, user=None, password=None, output_format=None, timeout=None, ignore_stderr=None, return_raw_response=None): """Executes the cli ``command`` on the server via ssh""" user, password = cls._get_username_password(user, password) # add time to measure hammer performance perf_test = conf.properties.get('performance.test.foreman.perf', '0') cmd = u'LANG={0} {1} hammer -v -u {2} -p {3} {4} {5}'.format( conf.properties['main.locale'], u'time -p' if perf_test == '1' else '', user, password, u'--output={0}'.format(output_format) if output_format else u'', command, ) response = ssh.command( cmd.encode('utf-8'), output_format=output_format, timeout=timeout, ) if return_raw_response: return response else: return cls._handle_response( response, ignore_stderr=ignore_stderr, ) @classmethod def exists(cls, options=None, search=None): """Search for an entity using the query ``search[0]="search[1]"`` Will be used the ``list`` command with the ``--search`` option to do the search. If ``options`` argument already have a search key, then the ``search`` argument will not be evaluated. Which allows different search query. """ if options is None: options = {} if search is not None and u'search' not in options: options.update({u'search': u'{0}=\\"{1}\\"'.format( search[0], search[1])}) result = cls.list(options) if result: result = result[0] return result @classmethod def info(cls, options=None, output_format=None): """Reads the entity information.""" cls.command_sub = 'info' if options is None: options = {} if cls.command_requires_org and 'organization-id' not in options: raise CLIError( 'organization-id option is required for {0}.info' .format(cls.__name__) ) result = cls.execute( command=cls._construct_command(options), output_format=output_format ) if output_format != 'json': result = hammer.parse_info(result) return result @classmethod def list(cls, options=None, per_page=True): """ List information. @param options: ID (sometimes name works as well) to retrieve info. """ cls.command_sub = 'list' if options is None: options = {} if 'per-page' not in options and per_page: options[u'per-page'] = 10000 if cls.command_requires_org and 'organization-id' not in options: raise CLIError( 'organization-id option is required for {0}.list' .format(cls.__name__) ) result = cls.execute( cls._construct_command(options), output_format='csv') return result @classmethod def puppetclasses(cls, options=None): """ Lists all puppet classes. """ cls.command_sub = 'puppet-classes' result = cls.execute( cls._construct_command(options), output_format='csv') return result @classmethod def remove_operating_system(cls, options=None): """ Removes OS from record. """ cls.command_sub = 'remove-operatingsystem' result = cls.execute(cls._construct_command(options)) return result @classmethod def sc_params(cls, options=None): """ Lists all smart class parameters. """ cls.command_sub = 'sc-params' result = cls.execute( cls._construct_command(options), output_format='csv') return result @classmethod def set_parameter(cls, options=None): """ Creates or updates parameter for a record. """ cls.command_sub = 'set-parameter' result = cls.execute(cls._construct_command(options)) return result @classmethod def update(cls, options=None): """ Updates existing record. """ cls.command_sub = 'update' result = cls.execute( cls._construct_command(options), output_format='csv') return result @classmethod def with_user(cls, username=None, password=None): """Context Manager for credentials""" if username is None: username = conf.properties['foreman.admin.username'] if password is None: password = conf.properties['foreman.admin.password'] class Wrapper(cls): """Wrapper class which defines the foreman admin username and password to be used when executing any cli command. """ foreman_admin_username = username foreman_admin_password = password return Wrapper @classmethod def _construct_command(cls, options=None): """ Build a hammer cli command based on the options passed """ tail = u'' if options is None: options = {} for key, val in options.items(): if val is None: continue if val is True: tail += u' --{0}'.format(key) elif val is not False: if isinstance(val, list): val = ','.join(str(el) for el in val) tail += u' --{0}="{1}"'.format(key, val) cmd = u'{0} {1} {2}'.format( cls.command_base, cls.command_sub, tail.strip() ) return cmd
normIncludes = [ {"fieldName": "field1", "includes": "GOOD,VALUE", "excludes": "BAD,STUFF", "begins": "", "ends": "", "replace": "goodvalue"}, {"fieldName": "field1", "includes": "", "excludes": "", "begins": "ABC", "ends": "", "replace": "goodvalue"}, {"fieldName": "field1", "includes": "", "excludes": "", "begins": "", "ends": "XYZ", "replace": "goodvalue"}, {"fieldName": "field100"} ]
""" Created on Thu Aug 31 16:04:18 2017 @author: adelpret """ import pinocchio as se3 import numpy as np from pinocchio import RobotWrapper from conversion_utils import config_sot_to_urdf, joints_sot_to_urdf, velocity_sot_to_urdf from dynamic_graph.sot.torque_control.inverse_dynamics_balance_controller import InverseDynamicsBalanceController from dynamic_graph.sot.torque_control.create_entities_utils import create_ctrl_manager import dynamic_graph.sot.torque_control.hrp2.balance_ctrl_sim_conf as balance_ctrl_conf import dynamic_graph.sot.torque_control.hrp2.control_manager_sim_conf as control_manager_conf from dynamic_graph.sot.torque_control.tests.robot_data_test import initRobotData np.set_printoptions(precision=3, suppress=True, linewidth=100); def create_balance_controller(dt, q, conf, robot_name='robot'): ctrl = InverseDynamicsBalanceController("invDynBalCtrl"); ctrl.q.value = tuple(q); ctrl.v.value = (NJ+6)*(0.0,); ctrl.wrench_right_foot.value = 6*(0.0,); ctrl.wrench_left_foot.value = 6*(0.0,); ctrl.posture_ref_pos.value = tuple(q[6:]); ctrl.posture_ref_vel.value = NJ*(0.0,); ctrl.posture_ref_acc.value = NJ*(0.0,); ctrl.com_ref_pos.value = (0., 0., 0.8); ctrl.com_ref_vel.value = 3*(0.0,); ctrl.com_ref_acc.value = 3*(0.0,); ctrl.rotor_inertias.value = tuple([g*g*r for (g,r) in zip(conf.GEAR_RATIOS, conf.ROTOR_INERTIAS)]) ctrl.gear_ratios.value = NJ*(1.0,); ctrl.contact_normal.value = conf.FOOT_CONTACT_NORMAL; ctrl.contact_points.value = conf.RIGHT_FOOT_CONTACT_POINTS; ctrl.f_min.value = conf.fMin; ctrl.f_max_right_foot.value = conf.fMax; ctrl.f_max_left_foot.value = conf.fMax; ctrl.mu.value = conf.mu[0]; ctrl.weight_contact_forces.value = (1e2, 1e2, 1e0, 1e3, 1e3, 1e3); ctrl.kp_com.value = 3*(conf.kp_com,); ctrl.kd_com.value = 3*(conf.kd_com,); ctrl.kp_constraints.value = 6*(conf.kp_constr,); ctrl.kd_constraints.value = 6*(conf.kd_constr,); ctrl.kp_feet.value = 6*(conf.kp_feet,); ctrl.kd_feet.value = 6*(conf.kd_feet,); ctrl.kp_posture.value = conf.kp_posture; ctrl.kd_posture.value = conf.kd_posture; ctrl.kp_pos.value = conf.kp_pos; ctrl.kd_pos.value = conf.kd_pos; ctrl.w_com.value = conf.w_com; ctrl.w_feet.value = conf.w_feet; ctrl.w_forces.value = conf.w_forces; ctrl.w_posture.value = conf.w_posture; ctrl.w_base_orientation.value = conf.w_base_orientation; ctrl.w_torques.value = conf.w_torques; ctrl.active_joints.value = NJ*(1,); ctrl.init(dt, robot_name); return ctrl; print "*** UNIT TEST FOR INVERSE-DYNAMICS-BALANCE-CONTROLLER (IDBC) ***" print "This test computes the torques using the IDBC and compares them with" print "the torques computed using the desired joint accelerations and contact" print "wrenches computed by the IDBC. The two values should be identical." print "Some small differences are expected due to the precision loss when" print "Passing the parameters from python to c++." print "However, none of the following values should be larger than 1e-3.\n" N_TESTS = 100 dt = 0.001; NJ = initRobotData.nbJoints q_sot = np.array([-0.0027421149619457344, -0.0013842807952574399, 0.6421082804660067, -0.0005693871512031474, -0.0013094048521806974, 0.0028568508070167, -0.0006369040657361668, 0.002710094953239396, -0.48241992906618536, 0.9224570746372157, -0.43872624301275104, -0.0021586727954009096, -0.0023395862060549863, 0.0031045906573987617, -0.48278188636903313, 0.9218508861779927, -0.4380058166724791, -0.0025558837738616047, -0.012985322450541008, 0.04430420221275542, 0.37027327677517635, 1.4795064165303056, 0.20855551221055582, -0.13188842278441873, 0.005487207370709895, -0.2586657542648506, 2.6374918629921953, -0.004223605878088189, 0.17118034021053144, 0.24171737354070008, 0.11594430024547904, -0.05264225067057105, -0.4691871937149223, 0.0031522040623960016, 0.011836097472447007, 0.18425595002313025]); ctrl_manager = create_ctrl_manager(control_manager_conf, dt); ctrl = create_balance_controller(dt, q_sot, balance_ctrl_conf); robot = RobotWrapper(initRobotData.testRobotPath, [], se3.JointModelFreeFlyer()) index_rf = robot.index('RLEG_JOINT5'); index_lf = robot.index('LLEG_JOINT5'); Md = np.matrix(np.zeros((NJ+6,NJ+6))); gr = joints_sot_to_urdf(balance_ctrl_conf.GEAR_RATIOS); ri = joints_sot_to_urdf(balance_ctrl_conf.ROTOR_INERTIAS); for i in range(NJ): Md[6+i,6+i] = ri[i] * gr[i] * gr[i]; for i in range(N_TESTS): q_sot += 0.001*np.random.random(NJ+6); v_sot = np.random.random(NJ+6); q_pin = np.matrix(config_sot_to_urdf(q_sot)); v_pin = np.matrix(velocity_sot_to_urdf(v_sot)); ctrl.q.value = tuple(q_sot); ctrl.v.value = tuple(v_sot); ctrl.tau_des.recompute(i); tau_ctrl = joints_sot_to_urdf(np.array(ctrl.tau_des.value)); ctrl.dv_des.recompute(i); dv = velocity_sot_to_urdf(np.array(ctrl.dv_des.value)); M = Md + robot.mass(q_pin); h = robot.bias(q_pin, v_pin); ctrl.f_des_right_foot.recompute(i); ctrl.f_des_left_foot.recompute(i); f_rf = np.matrix(ctrl.f_des_right_foot.value).T; f_lf = np.matrix(ctrl.f_des_left_foot.value).T; J_rf = robot.jacobian(q_pin, index_rf); J_lf = robot.jacobian(q_pin, index_lf); tau_pin = M*np.matrix(dv).T + h - J_rf.T * f_rf - J_lf.T * f_lf; print "norm(tau_ctrl-tau_pin) = %.4f"% np.linalg.norm(tau_ctrl - tau_pin[6:,0].T); print "norm(tau_pin[:6]) = %.4f"% np.linalg.norm(tau_pin[:6]);
""" YieldFrom astroid node This node represents the Python "yield from" statement, which functions similarly to the "yield" statement except that the generator can delegate some generating work to another generator. Attributes: - value (GeneratorExp) - The generator that this YieldFrom is delegating work to. Example: - value -> Call(range, Name('g', Load())) """ def fun(g): yield from range(g)
import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding field 'GeoNamesMatchingLogMatchedPlaces.remark' db.add_column('united_geonames_geonamesmatchinglogmatchedplaces', 'remark', self.gf('django.db.models.fields.TextField')(null=True, blank=True), keep_default=False) def backwards(self, orm): # Deleting field 'GeoNamesMatchingLogMatchedPlaces.remark' db.delete_column('united_geonames_geonamesmatchinglogmatchedplaces', 'remark') models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 7, 25, 14, 53, 19, 34425)'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 7, 25, 14, 53, 19, 34316)'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'united_geonames.geonamesmatchinglogmatch': { 'Meta': {'ordering': "['-matching_index']", 'object_name': 'GeoNamesMatchingLogMatch'}, 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True'}), 'display_for_users': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'matching_index': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '233', 'null': 'True', 'blank': 'True'}), 'number_of_alternatives': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}), 'start_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}) }, 'united_geonames.geonamesmatchinglogmatchedplaces': { 'Meta': {'object_name': 'GeoNamesMatchingLogMatchedPlaces'}, 'best_match': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'geographical_distance': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'matchinglogmatch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'matched'", 'null': 'True', 'to': "orm['united_geonames.GeoNamesMatchingLogMatch']"}), 'ngram_distance': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}), 'percentage': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}), 'remark': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'united_geoname': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['united_geonames.UnitedGeoName']", 'null': 'True', 'blank': 'True'}) }, 'united_geonames.unitedgeoname': { 'Meta': {'object_name': 'UnitedGeoName'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'main_name': ('django.db.models.fields.CharField', [], {'max_length': '300'}) }, 'united_geonames.unitedgeonamesynonim': { 'Meta': {'object_name': 'UnitedGeoNameSynonim'}, 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True'}), 'coordinates': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'spatial_index': 'False'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'identifier': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}), 'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}), 'region': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}), 'subregion': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}), 'synonim_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'synonim_content_type_set'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}), 'synonim_name': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}), 'synonim_object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}), 'united_geoname': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'geonames'", 'null': 'True', 'to': "orm['united_geonames.UnitedGeoName']"}) }, 'united_geonames.usergeoname': { 'Meta': {'object_name': 'UserGeoName'}, 'coordinates': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'spatial_index': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'region': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'subregion': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}) }, 'united_geonames.userproject': { 'Meta': {'object_name': 'UserProject'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}), 'user': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}) } } complete_apps = ['united_geonames']
from django.utils.translation import ugettext_noop as _ from geonode.notifications_helper import NotificationsAppConfigBase class PeopleAppConfig(NotificationsAppConfigBase): name = 'geonode.people' NOTIFICATIONS = (("user_follow", _("User following you"), _("Another user has started following you"),), ("account_approve", _("User requested access"), _("A new user has requested access to the site"),), ("account_active", _("Account activated"), _("This account is now active and can log in the site"),), ) def ready(self): super(PeopleAppConfig, self).ready() default_app_config = 'geonode.people.PeopleAppConfig'
import sys import math import time from bzrc import BZRC, Command class Agent(object): """Class handles all command and control logic for a teams tanks.""" def __init__(self, bzrc): self.bzrc = bzrc self.constants = self.bzrc.get_constants() self.commands = [] def tick(self, time_diff): """Some time has passed; decide what to do next.""" mytanks, othertanks, flags, shots, obstacles = self.bzrc.get_lots_o_stuff() self.mytanks = mytanks self.othertanks = othertanks self.flags = flags self.shots = shots self.enemies = [tank for tank in othertanks if tank.color != self.constants['team']] self.commands = [] for tank in mytanks: self.attack_enemies(tank) results = self.bzrc.do_commands(self.commands) def attack_enemies(self, tank): """Find the closest enemy and chase it, shooting as you go.""" best_enemy = None best_dist = 2 * float(self.constants['worldsize']) for enemy in self.enemies: if enemy.status != 'alive': continue dist = math.sqrt((enemy.x - tank.x)**2 + (enemy.y - tank.y)**2) if dist < best_dist: best_dist = dist best_enemy = enemy if best_enemy is None: command = Command(tank.index, 0, 0, False) self.commands.append(command) else: self.move_to_position(tank, best_enemy.x, best_enemy.y) def move_to_position(self, tank, target_x, target_y): """Set command to move to given coordinates.""" target_angle = math.atan2(target_y - tank.y, target_x - tank.x) relative_angle = self.normalize_angle(target_angle - tank.angle) command = Command(tank.index, 1, 2 * relative_angle, True) self.commands.append(command) def normalize_angle(self, angle): """Make any angle be between +/- pi.""" angle -= 2 * math.pi * int (angle / (2 * math.pi)) if angle <= -math.pi: angle += 2 * math.pi elif angle > math.pi: angle -= 2 * math.pi return angle def main(): # Process CLI arguments. try: execname, host, port = sys.argv except ValueError: execname = sys.argv[0] print >>sys.stderr, '%s: incorrect number of arguments' % execname print >>sys.stderr, 'usage: %s hostname port' % sys.argv[0] sys.exit(-1) # Connect. #bzrc = BZRC(host, int(port), debug=True) bzrc = BZRC(host, int(port)) agent = Agent(bzrc) prev_time = time.time() # Run the agent try: while True: time_diff = time.time() - prev_time agent.tick(time_diff) except KeyboardInterrupt: print "Exiting due to keyboard interrupt." bzrc.close() if __name__ == '__main__': main()
from itertools import * MOD = 10007 fact = [1] * MOD for i in xrange(1, MOD): fact[i] = (fact[i-1] * i) def choose(n, k): if k > n: return 0 elif n < MOD: return (fact[n]/fact[n-k]/fact[k])%MOD else: prod = 1 while n > 0: prod *= choose(n%MOD, k%MOD) prod %= MOD n /= MOD k /= MOD return prod def compute(): h, w, r = map(int, raw_input().split()) rocks = [map(int, raw_input().split()) for i in range(r)] if (h+w-2)%3 != 0: return 0 # normalize rock coordinates h, w = h-1-(h+w-2)/3, w-1-(h+w-2)/3 for i in range(r): row, col = rocks[i] if (row+col-2)%3 != 0: rocks[i] = None else: rocks[i] = [row-1-(row+col-2)/3, col-1-(row+col-2)/3] if rocks[i][0] < 0 or rocks[i][0] > h: rocks[i] = None elif rocks[i][1] < 0 or rocks[i][1] > w: rocks[i] = None total = 0 for num in range(r+1): for perm in permutations(range(r), num): # verify increasing property of permutation inc = True for i in range(num): if rocks[perm[i]] == None: inc = False break if i > 0: if rocks[perm[i]][0] < rocks[perm[i-1]][0]: inc = False break if rocks[perm[i]][1] < rocks[perm[i-1]][1]: inc = False break if inc: points = [[0,0]] + [rocks[j] for j in perm] + [[h,w]] # number of paths going through all points prod = 1 for j in range(1, len(points)): dh = points[j][0] - points[j-1][0] dw = points[j][1] - points[j-1][1] prod *= choose(dh+dw, dw) prod %= MOD # inclusion-exclusion total += (-1)**num * prod total %= MOD return total for i in range(input()): print "Case #%d: %d" % (i+1, compute())
""" This module provides functions that generate commonly used Hamiltonian terms. """ __all__ = [ "Annihilator", "Creator", "CPFactory", "HoppingFactory", "PairingFactory", "HubbardFactory", "CoulombFactory", "HeisenbergFactory", "IsingFactory", "TwoSpinTermFactory", ] from HamiltonianPy.quantumoperator.constant import ANNIHILATION, CREATION, \ SPIN_DOWN, SPIN_UP from HamiltonianPy.quantumoperator.particlesystem import AoC, ParticleTerm from HamiltonianPy.quantumoperator.spinsystem import * def Creator(site, spin=0, orbit=0): """ Generate creation operator: $c_i^{\\dagger}$. Parameters ---------- site : list, tuple or 1D np.ndarray The coordinates of the localized single-particle state. The `site` parameter should be 1D array with length 1,2 or 3. spin : int, optional The spin index of the single-particle state. Default: 0. orbit : int, optional The orbit index of the single-particle state. Default: 0. Returns ------- operator : AoC The corresponding creation operator. Examples -------- >>> from HamiltonianPy.quantumoperator import Creator >>> Creator((0, 0), spin=1) AoC(otype=CREATION, site=(0, 0), spin=1, orbit=0) """ return AoC(CREATION, site=site, spin=spin, orbit=orbit) def Annihilator(site, spin=0, orbit=0): """ Generate annihilation operator: $c_i$. Parameters ---------- site : list, tuple or 1D np.ndarray The coordinates of the localized single-particle state. The `site` parameter should be 1D array with length 1,2 or 3. spin : int, optional The spin index of the single-particle state. Default: 0. orbit : int, optional The orbit index of the single-particle state. Default: 0. Returns ------- operator : AoC The corresponding annihilation operator. Examples -------- >>> from HamiltonianPy.quantumoperator import Annihilator >>> Annihilator((0, 0), spin=0) AoC(otype=ANNIHILATION, site=(0, 0), spin=0, orbit=0) """ return AoC(ANNIHILATION, site=site, spin=spin, orbit=orbit) def CPFactory(site, *, spin=0, orbit=0, coeff=1.0): """ Generate chemical potential term: '$\\mu c_i^{\\dagger} c_i$'. Parameters ---------- site : list, tuple or 1D np.ndarray The coordinates of the localized single-particle state. The `site` parameter should be 1D array with length 1,2 or 3. spin : int, optional, keyword-only The spin index of the single-particle state. Default: 0. orbit : int, optional, keyword-only The orbit index of the single-particle state. Default: 0. coeff : int or float, optional, keyword-only The coefficient of this term. Default: 1.0. Returns ------- term : ParticleTerm The corresponding chemical potential term. Examples -------- >>> from HamiltonianPy.quantumoperator import CPFactory >>> term = CPFactory((0, 0)) >>> print(term) The coefficient of this term: 1.0 The component operators: AoC(otype=CREATION, site=(0, 0), spin=0, orbit=0) AoC(otype=ANNIHILATION, site=(0, 0), spin=0, orbit=0) """ c = AoC(CREATION, site=site, spin=spin, orbit=orbit) a = AoC(ANNIHILATION, site=site, spin=spin, orbit=orbit) return ParticleTerm((c, a), coeff=coeff, classification="number") def HoppingFactory( site0, site1, *, spin0=0, spin1=None, orbit0=0, orbit1=None, coeff=1.0 ): """ Generate hopping term: '$t c_i^{\\dagger} c_j$'. These parameters suffixed with '0' are for the creation operator and '1' for annihilation operator. Parameters ---------- site0, site1 : list, tuple or 1D np.ndarray The coordinates of the localized single-particle state. `site0` and `site1` should be 1D array with length 1, 2 or 3. spin0, spin1 : int, optional, keyword-only The spin index of the single-particle state. The default value for `spin0` is 0; The default value for `spin1` is None, which implies that `spin1` takes the same value as `spin0`. orbit0, orbit1 : int, optional, keyword-only The orbit index of the single-particle state. The default value for `orbit0` is 0; The default value for `orbit1` is None, which implies that `orbit1` takes the same value as `orbit0`. coeff : int, float or complex, optional, keyword-only The coefficient of this term. Default: 1.0. Returns ------- term : ParticleTerm The corresponding hopping term. Examples -------- >>> from HamiltonianPy.quantumoperator import HoppingFactory >>> term = HoppingFactory(site0=(0, 0), site1=(1, 1), spin0=1) >>> print(term) The coefficient of this term: 1.0 The component operators: AoC(otype=CREATION, site=(0, 0), spin=1, orbit=0) AoC(otype=ANNIHILATION, site=(1, 1), spin=1, orbit=0) >>> term = HoppingFactory(site0=(0, 0), site1=(1, 1), spin0=0, spin1=1) >>> print(term) The coefficient of this term: 1.0 The component operators: AoC(otype=CREATION, site=(0, 0), spin=0, orbit=0) AoC(otype=ANNIHILATION, site=(1, 1), spin=1, orbit=0) """ if spin1 is None: spin1 = spin0 if orbit1 is None: orbit1 = orbit0 c = AoC(CREATION, site=site0, spin=spin0, orbit=orbit0) a = AoC(ANNIHILATION, site=site1, spin=spin1, orbit=orbit1) classification = "hopping" if c.state != a.state else "number" return ParticleTerm((c, a), coeff=coeff, classification=classification) def PairingFactory( site0, site1, *, spin0=0, spin1=0, orbit0=0, orbit1=0, coeff=1.0, which="h" ): """ Generate pairing term: '$p c_i^{\\dagger} c_j^{\\dagger}$' or '$p c_i c_j$'. These parameters suffixed with '0' are for the 1st operator and '1' for 2nd operator. Parameters ---------- site0, site1 : list, tuple or 1D np.ndarray The coordinates of the localized single-particle state. `site0` and `site1` should be 1D array with length 1, 2 or 3. spin0, spin1 : int, optional, keyword-only The spin index of the single-particle state. Default: 0. orbit0, orbit1 : int, optional, keyword-only The orbit index of the single-particle state. Default: 0. coeff : int, float or complex, optional, keyword-only The coefficient of this term. Default: 1.0. which : str, optional, keyword-only Determine whether to generate a particle- or hole-pairing term. Valid values: ["h" | "hole"] for hole-pairing; ["p" | "particle"] for particle-pairing. Default: "h". Returns ------- term : ParticleTerm The corresponding pairing term. Examples -------- >>> from HamiltonianPy.quantumoperator import PairingFactory >>> term = PairingFactory((0, 0), (1, 1), spin0=0, spin1=1, which="h") >>> print(term) The coefficient of this term: 1.0 The component operators: AoC(otype=ANNIHILATION, site=(0, 0), spin=0, orbit=0) AoC(otype=ANNIHILATION, site=(1, 1), spin=1, orbit=0) >>> term = PairingFactory((0, 0), (1, 1), spin0=0, spin1=1, which="p") >>> print(term) The coefficient of this term: 1.0 The component operators: AoC(otype=CREATION, site=(0, 0), spin=0, orbit=0) AoC(otype=CREATION, site=(1, 1), spin=1, orbit=0) """ assert which in ("h", "hole", "p", "particle") otype = ANNIHILATION if which in ("h", "hole") else CREATION aoc0 = AoC(otype, site=site0, spin=spin0, orbit=orbit0) aoc1 = AoC(otype, site=site1, spin=spin1, orbit=orbit1) return ParticleTerm((aoc0, aoc1), coeff=coeff) def HubbardFactory(site, *, orbit=0, coeff=1.0): """ Generate Hubbard term: '$U n_{i\\uparrow} n_{i\\downarrow}$'. This function is valid only for spin-1/2 system. Parameters ---------- site : list, tuple or 1D np.ndarray The coordinates of the localized single-particle state. `site` should be 1D array with length 1,2 or 3. orbit : int, optional, keyword-only The orbit index of the single-particle state. Default: 0. coeff : int or float, optional, keyword-only The coefficient of this term. Default: 1.0. Returns ------- term : ParticleTerm The corresponding Hubbard term. Examples -------- >>> from HamiltonianPy.quantumoperator import HubbardFactory >>> term = HubbardFactory(site=(0, 0)) >>> print(term) The coefficient of this term: 1.0 The component operators: AoC(otype=CREATION, site=(0, 0), spin=1, orbit=0) AoC(otype=ANNIHILATION, site=(0, 0), spin=1, orbit=0) AoC(otype=CREATION, site=(0, 0), spin=0, orbit=0) AoC(otype=ANNIHILATION, site=(0, 0), spin=0, orbit=0) """ c_up = AoC(CREATION, site=site, spin=SPIN_UP, orbit=orbit) c_down = AoC(CREATION, site=site, spin=SPIN_DOWN, orbit=orbit) a_up = AoC(ANNIHILATION, site=site, spin=SPIN_UP, orbit=orbit) a_down = AoC(ANNIHILATION, site=site, spin=SPIN_DOWN, orbit=orbit) return ParticleTerm( (c_up, a_up, c_down, a_down), coeff=coeff, classification="Coulomb" ) def CoulombFactory( site0, site1, *, spin0=0, spin1=0, orbit0=0, orbit1=0, coeff=1.0 ): """ Generate Coulomb interaction term: '$U n_i n_j$'. These parameters suffixed with '0' are for the 1st operator and '1' for 2nd operator. Parameters ---------- site0, site1 : list, tuple or 1D np.ndarray The coordinates of the localized single-particle state. `site0` and `site1` should be 1D array with length 1, 2 or 3. spin0, spin1 : int, optional, keyword-only The spin index of the single-particle state. Default: 0. orbit0, orbit1 : int, optional, keyword-only The orbit index of the single-particle state. Default: 0. coeff : int or float, optional, keyword-only The coefficient of this term. Default: 1.0. Returns ------- term : ParticleTerm The corresponding Coulomb interaction term. Examples -------- >>> from HamiltonianPy.quantumoperator import CoulombFactory >>> term = CoulombFactory((0, 0), (1, 1), spin0=0, spin1=1) >>> print(term) The coefficient of this term: 1.0 The component operators: AoC(otype=CREATION, site=(0, 0), spin=0, orbit=0) AoC(otype=ANNIHILATION, site=(0, 0), spin=0, orbit=0) AoC(otype=CREATION, site=(1, 1), spin=1, orbit=0) AoC(otype=ANNIHILATION, site=(1, 1), spin=1, orbit=0) """ c0 = AoC(CREATION, site=site0, spin=spin0, orbit=orbit0) a0 = AoC(ANNIHILATION, site=site0, spin=spin0, orbit=orbit0) c1 = AoC(CREATION, site=site1, spin=spin1, orbit=orbit1) a1 = AoC(ANNIHILATION, site=site1, spin=spin1, orbit=orbit1) return ParticleTerm((c0, a0, c1, a1), coeff=coeff, classification="Coulomb") def HeisenbergFactory(site0, site1, *, coeff=1.0): """ Generate Heisenberg interaction term: '$J S_i S_j$'. Parameters ---------- site0, site1 : list, tuple or 1D np.ndarray The coordinates of the lattice site on which the spin operator is defined. `site0` and `site1` should be 1D array with length 1, 2 or 3. `site0` for the first spin operator and `site1` for the second spin operator. coeff : int or float, optional, keyword-only The coefficient of this term. Default: 1.0. Returns ------- terms : 3-tuple terms[0] is the '$J S_i^z S_j^z$' term; terms[1] is the '$J/2 S_i^+ S_j^-$' term; terms[2] is the '$J/2 S_i^- S_j^+$' term. Examples -------- >>> from HamiltonianPy.quantumoperator import HeisenbergFactory >>> term = HeisenbergFactory((0, 0), (1, 1)) >>> print(term[0]) The coefficient of this term: 1.0 The component operators: SpinOperator(otype="z", site=(0, 0)) SpinOperator(otype="z", site=(1, 1)) >>> print(term[1]) The coefficient of this term: 0.5 The component operators: SpinOperator(otype="p", site=(0, 0)) SpinOperator(otype="m", site=(1, 1)) >>> print(term[2]) The coefficient of this term: 0.5 The component operators: SpinOperator(otype="m", site=(0, 0)) SpinOperator(otype="p", site=(1, 1)) """ sz0 = SpinOperator(otype="z", site=site0) sp0 = SpinOperator(otype="p", site=site0) sm0 = SpinOperator(otype="m", site=site0) sz1 = SpinOperator(otype="z", site=site1) sp1 = SpinOperator(otype="p", site=site1) sm1 = SpinOperator(otype="m", site=site1) return ( SpinInteraction((sz0, sz1), coeff=coeff), SpinInteraction((sp0, sm1), coeff=coeff/2), SpinInteraction((sm0, sp1), coeff=coeff/2), ) def IsingFactory(site0, site1, alpha, *, coeff=1.0): """ Generate Ising type spin interaction term: '$J S_i^{\\alpha} S_j^{\\alpha}$' Parameters ---------- site0, site1 : list, tuple or 1D np.ndarray The coordinates of the lattice site on which the spin operator is defined. `site0` and `site1` should be 1D array with length 1, 2 or 3. `site0` for the first spin operator and `site1` for the second spin operator. alpha : {"x", "y" or "z"} Which type of spin operator is involved. coeff : int or float, optional, keyword-only The coefficient of this term. Default: 1.0. Returns ------- term: SpinInteraction The corresponding spin interaction term. Examples -------- >>> from HamiltonianPy.quantumoperator import IsingFactory >>> term = IsingFactory((0, 0), (1, 1), "x") >>> print(term) The coefficient of this term: 1.0 The component operators: SpinOperator(otype="x", site=(0, 0)) SpinOperator(otype="x", site=(1, 1)) """ assert alpha in ("x", "y", "z") s0_alpha = SpinOperator(otype=alpha, site=site0) s1_alpha = SpinOperator(otype=alpha, site=site1) return SpinInteraction((s0_alpha, s1_alpha), coeff=coeff) def TwoSpinTermFactory(site0, site1, alpha0, alpha1, *, coeff=1.0): """ Generate general two spin interaction term: '$J S_i^{\\alpha} S_j^{\\beta}$' Parameters ---------- site0, site1 : list, tuple or 1D np.ndarray The coordinates of the lattice site on which the spin operator is defined. `site0` and `site1` should be 1D array with length 1, 2 or 3. `site0` for the first spin operator and `site1` for the second spin operator. alpha0, alpha1 : {"x", "y" or "z"} Which type of spin operator is involved. `alpha0` for the first and `alpha1` for the second spin operator. coeff : int or float, optional, keyword-only The coefficient of this term. Default: 1.0. Returns ------- term: SpinInteraction The corresponding spin interaction term. Examples -------- >>> from HamiltonianPy.quantumoperator import TwoSpinTermFactory >>> term = TwoSpinTermFactory((0, 0), (1, 1), alpha0="x", alpha1="y") >>> print(term) The coefficient of this term: 1.0 The component operators: SpinOperator(otype="x", site=(0, 0)) SpinOperator(otype="y", site=(1, 1)) """ assert alpha0 in ("x", "y", "z") assert alpha1 in ("x", "y", "z") s0_alpha = SpinOperator(otype=alpha0, site=site0) s1_alpha = SpinOperator(otype=alpha1, site=site1) return SpinInteraction((s0_alpha, s1_alpha), coeff=coeff)
""" Copyright 2016 Puffin Software. All rights reserved. """ from com.puffinware.pistat.models import User, Location, Category, Thermostat, Sensor, Reading from com.puffinware.pistat import DB from logging import getLogger log = getLogger(__name__) def setup_db(app): DB.create_tables([User, Location, Category, Thermostat, Sensor, Reading], safe=True) # This hook ensures that a connection is opened to handle any queries # generated by the request. @app.before_request def _db_connect(): log.debug('DB Connect') DB.connect() # This hook ensures that the connection is closed when we've finished # processing the request. @app.teardown_request def _db_close(exc): if not DB.is_closed(): log.debug('DB Close') DB.close()
x = int(input()) y = int(input()) print('In this test case x =', x, 'and y =', y) if x >= y: print('(The maximum is x)') theMax = x else: print('(The maximum is y)') theMax = y print('The maximum is', theMax)
from django.conf.urls import url from django.contrib.auth.views import login, \ logout, \ logout_then_login, \ password_change, \ password_change_done, \ password_reset, \ password_reset_done, \ password_reset_confirm, \ password_reset_complete from . import views urlpatterns = [ url(r'^$', views.dashboard, name='dashboard'), # login / logout urls url(r'^login/$', view=login, name='login'), url(r'^logout/$', view=logout, name='logout'), url(r'^logout-then-login/$', view=logout_then_login, name='logout_then_login'), # change password urls url(r'^password-change/$', view=password_change, name='password_change'), url(r'^password-change/done/$', view=password_change_done, name='password_change_done'), # restore password urls url(r'^password-reset/$', view=password_reset, name='password_reset'), url(r'^password-reset/done/$', view=password_reset_done, name='password_reset_done'), url(r'^password-reset/confirm/(?P<uidb64>[-\w]+)/(?P<token>[-\w]+)/$', view=password_reset_confirm, name='password_reset_confirm'), url(r'^password-reset/complete/$', view=password_reset_complete, name='password_reset_complete'), ]
import pygame from Explosion import Explosion class Bullet(object): PLAYER, ENEMY = 1, 0 def __init__(self, manager, parent, init_pos, direction, speed=3): self.manager = manager self.parent = parent self.image = pygame.image.load("res/tanks/bullet.png") self.explosion = pygame.image.load("res/explosions/bullet_explosion.png") self.rect = self.calculate_init_point(direction, init_pos) self.speed = self.calculate_speed(direction, speed) def calculate_speed(self, direction, speed): if direction == 0: # Up return (0, -speed) if direction == 1: # Down self.image = pygame.transform.rotate(self.image, 180) return (0, speed) if direction == 2: # Left self.image = pygame.transform.rotate(self.image, 90) return (-speed, 0) if direction == 3: # Right self.image = pygame.transform.rotate(self.image, -90) return (speed, 0) def calculate_init_point(self, direction, init_pos): rect = self.image.get_rect() posX = init_pos[0] posY = init_pos[1] if direction == 0: rect.x = posX + 12 rect.y = posY - 14 if direction == 1: rect.x = posX + 12 rect.y = posY + 32 if direction == 2: rect.x = posX - 14 rect.y = posY + 12 if direction == 3: rect.x = posX + 32 rect.y = posY + 12 return rect def update(self, blocks): posX = self.speed[0] posY = self.speed[1] self.rect.x += posX self.rect.y += posY # Si nos vamos a salir del mundo, explotamos if self.rect.x < 0: self.rect.x = 0 self.explode() if self.rect.x > 632: self.rect.x = 632 self.explode() if self.rect.y < 0: self.rect.y = 0 self.explode() if self.rect.y > 568: self.rect.y = 568 self.explode() crashed = False # Check if we crashed with another block for block in blocks: # We can't crash with ourselves... can we? if block == self: pass # If we do crash, we tell the manager to destroy said block elif self.rect.colliderect(block): # Right after we check if we can destroy said block block_name = type(block).__name__ if block_name in ["Block", "Heart", "Bullet"]: self.impact_side(block) if self.manager.destroy_element(block): # Block tells us if it destroyed crashed = True else: # Else, we explode self.explode() elif block_name == "Enemy" and self.parent: # Player bullet against enemy self.impact_side(block) # If enemy tells us it destroyed, it's a kill if self.manager.destroy_element(block): self.manager.increment_kills() crashed = True else: # Else, we explode self.explode() elif block_name == "Enemy" and not self.parent: # Enemy bullet hitting enemy crashed = True elif block_name == "Jugador" and not self.parent: # Enemy bullet hitting the player self.impact_side(block) # If the player destroys, we destroy if self.manager.destroy_element(block): crashed = True else: # Else, we explode self.explode() else: pass if crashed: # If we crashed, we destroy ourselves self.destroy() def destroy(self): if self.parent == self.PLAYER: self.manager.remove_player_bullet() self.manager.remove_bullet(self) return True def explode(self): if self.parent == self.PLAYER: self.manager.remove_player_bullet() # Create the explosion Explosion(self.manager, self.rect) self.manager.remove_bullet(self) return True def impact_side(self, block): posX = self.speed[0] posY = self.speed[1] if posX > 0: # Left side self.rect.right = block.rect.left if posX < 0: # Right side self.rect.left = block.rect.right if posY > 0: # Upper side self.rect.bottom = block.rect.top if posY < 0: # Lower side self.rect.top = block.rect.bottom
from rest_framework.viewsets import ModelViewSet from rest_framework.generics import RetrieveAPIView, ListAPIView from django.shortcuts import get_object_or_404 from django.db.models import Q from common.utils import get_logger, get_object_or_none from common.mixins.api import SuggestionMixin from users.models import User, UserGroup from users.serializers import UserSerializer, UserGroupSerializer from users.filters import UserFilter from perms.models import AssetPermission from perms.serializers import AssetPermissionSerializer from perms.filters import AssetPermissionFilter from orgs.mixins.api import OrgBulkModelViewSet from orgs.mixins import generics from assets.api import FilterAssetByNodeMixin from ..models import Asset, Node, Platform from .. import serializers from ..tasks import ( update_assets_hardware_info_manual, test_assets_connectivity_manual, test_system_users_connectivity_a_asset, push_system_users_a_asset ) from ..filters import FilterAssetByNodeFilterBackend, LabelFilterBackend, IpInFilterBackend logger = get_logger(__file__) __all__ = [ 'AssetViewSet', 'AssetPlatformRetrieveApi', 'AssetGatewayListApi', 'AssetPlatformViewSet', 'AssetTaskCreateApi', 'AssetsTaskCreateApi', 'AssetPermUserListApi', 'AssetPermUserPermissionsListApi', 'AssetPermUserGroupListApi', 'AssetPermUserGroupPermissionsListApi', ] class AssetViewSet(SuggestionMixin, FilterAssetByNodeMixin, OrgBulkModelViewSet): """ API endpoint that allows Asset to be viewed or edited. """ model = Asset filterset_fields = { 'hostname': ['exact'], 'ip': ['exact'], 'system_users__id': ['exact'], 'platform__base': ['exact'], 'is_active': ['exact'], 'protocols': ['exact', 'icontains'] } search_fields = ("hostname", "ip") ordering_fields = ("hostname", "ip", "port", "cpu_cores") ordering = ('hostname', ) serializer_classes = { 'default': serializers.AssetSerializer, 'suggestion': serializers.MiniAssetSerializer } rbac_perms = { 'match': 'assets.match_asset' } extra_filter_backends = [FilterAssetByNodeFilterBackend, LabelFilterBackend, IpInFilterBackend] def set_assets_node(self, assets): if not isinstance(assets, list): assets = [assets] node_id = self.request.query_params.get('node_id') if not node_id: return node = get_object_or_none(Node, pk=node_id) if not node: return node.assets.add(*assets) def perform_create(self, serializer): assets = serializer.save() self.set_assets_node(assets) class AssetPlatformRetrieveApi(RetrieveAPIView): queryset = Platform.objects.all() serializer_class = serializers.PlatformSerializer rbac_perms = { 'retrieve': 'assets.view_gateway' } def get_object(self): asset_pk = self.kwargs.get('pk') asset = get_object_or_404(Asset, pk=asset_pk) return asset.platform class AssetPlatformViewSet(ModelViewSet): queryset = Platform.objects.all() serializer_class = serializers.PlatformSerializer filterset_fields = ['name', 'base'] search_fields = ['name'] def check_object_permissions(self, request, obj): if request.method.lower() in ['delete', 'put', 'patch'] and obj.internal: self.permission_denied( request, message={"detail": "Internal platform"} ) return super().check_object_permissions(request, obj) class AssetsTaskMixin: def perform_assets_task(self, serializer): data = serializer.validated_data action = data['action'] assets = data.get('assets', []) if action == "refresh": task = update_assets_hardware_info_manual.delay(assets) else: # action == 'test': task = test_assets_connectivity_manual.delay(assets) return task def perform_create(self, serializer): task = self.perform_assets_task(serializer) self.set_task_to_serializer_data(serializer, task) def set_task_to_serializer_data(self, serializer, task): data = getattr(serializer, '_data', {}) data["task"] = task.id setattr(serializer, '_data', data) class AssetTaskCreateApi(AssetsTaskMixin, generics.CreateAPIView): model = Asset serializer_class = serializers.AssetTaskSerializer def create(self, request, *args, **kwargs): pk = self.kwargs.get('pk') request.data['asset'] = pk request.data['assets'] = [pk] return super().create(request, *args, **kwargs) def check_permissions(self, request): action = request.data.get('action') action_perm_require = { 'refresh': 'assets.refresh_assethardwareinfo', 'push_system_user': 'assets.push_assetsystemuser', 'test': 'assets.test_assetconnectivity', 'test_system_user': 'assets.test_assetconnectivity' } perm_required = action_perm_require.get(action) has = self.request.user.has_perm(perm_required) if not has: self.permission_denied(request) def perform_asset_task(self, serializer): data = serializer.validated_data action = data['action'] if action not in ['push_system_user', 'test_system_user']: return asset = data['asset'] system_users = data.get('system_users') if not system_users: system_users = asset.get_all_system_users() if action == 'push_system_user': task = push_system_users_a_asset.delay(system_users, asset=asset) elif action == 'test_system_user': task = test_system_users_connectivity_a_asset.delay(system_users, asset=asset) else: task = None return task def perform_create(self, serializer): task = self.perform_asset_task(serializer) if not task: task = self.perform_assets_task(serializer) self.set_task_to_serializer_data(serializer, task) class AssetsTaskCreateApi(AssetsTaskMixin, generics.CreateAPIView): model = Asset serializer_class = serializers.AssetsTaskSerializer class AssetGatewayListApi(generics.ListAPIView): serializer_class = serializers.GatewayWithAuthSerializer rbac_perms = { 'list': 'assets.view_gateway' } def get_queryset(self): asset_id = self.kwargs.get('pk') asset = get_object_or_404(Asset, pk=asset_id) if not asset.domain: return [] queryset = asset.domain.gateways.filter(protocol='ssh') return queryset class BaseAssetPermUserOrUserGroupListApi(ListAPIView): def get_object(self): asset_id = self.kwargs.get('pk') asset = get_object_or_404(Asset, pk=asset_id) return asset def get_asset_related_perms(self): asset = self.get_object() nodes = asset.get_all_nodes(flat=True) perms = AssetPermission.objects.filter(Q(assets=asset) | Q(nodes__in=nodes)) return perms class AssetPermUserListApi(BaseAssetPermUserOrUserGroupListApi): filterset_class = UserFilter search_fields = ('username', 'email', 'name', 'id', 'source', 'role') serializer_class = UserSerializer def get_queryset(self): perms = self.get_asset_related_perms() users = User.objects.filter( Q(assetpermissions__in=perms) | Q(groups__assetpermissions__in=perms) ).distinct() return users class AssetPermUserGroupListApi(BaseAssetPermUserOrUserGroupListApi): serializer_class = UserGroupSerializer def get_queryset(self): perms = self.get_asset_related_perms() user_groups = UserGroup.objects.filter(assetpermissions__in=perms).distinct() return user_groups class BaseAssetPermUserOrUserGroupPermissionsListApiMixin(generics.ListAPIView): model = AssetPermission serializer_class = AssetPermissionSerializer filterset_class = AssetPermissionFilter search_fields = ('name',) rbac_perms = { 'list': 'perms.view_assetpermission' } def get_object(self): asset_id = self.kwargs.get('pk') asset = get_object_or_404(Asset, pk=asset_id) return asset def filter_asset_related(self, queryset): asset = self.get_object() nodes = asset.get_all_nodes(flat=True) perms = queryset.filter(Q(assets=asset) | Q(nodes__in=nodes)) return perms def filter_queryset(self, queryset): queryset = super().filter_queryset(queryset) queryset = self.filter_asset_related(queryset) return queryset class AssetPermUserPermissionsListApi(BaseAssetPermUserOrUserGroupPermissionsListApiMixin): def filter_queryset(self, queryset): queryset = super().filter_queryset(queryset) queryset = self.filter_user_related(queryset) queryset = queryset.distinct() return queryset def filter_user_related(self, queryset): user = self.get_perm_user() user_groups = user.groups.all() perms = queryset.filter(Q(users=user) | Q(user_groups__in=user_groups)) return perms def get_perm_user(self): user_id = self.kwargs.get('perm_user_id') user = get_object_or_404(User, pk=user_id) return user class AssetPermUserGroupPermissionsListApi(BaseAssetPermUserOrUserGroupPermissionsListApiMixin): def filter_queryset(self, queryset): queryset = super().filter_queryset(queryset) queryset = self.filter_user_group_related(queryset) queryset = queryset.distinct() return queryset def filter_user_group_related(self, queryset): user_group = self.get_perm_user_group() perms = queryset.filter(user_groups=user_group) return perms def get_perm_user_group(self): user_group_id = self.kwargs.get('perm_user_group_id') user_group = get_object_or_404(UserGroup, pk=user_group_id) return user_group
""" Perform song identification by loading up a corpus of harmonic analyses and comparing parse results to all of them, according to some distance metric. """ """ ============================== License ======================================== Copyright (C) 2008, 2010-12 University of Edinburgh, Mark Granroth-Wilding This file is part of The Jazz Parser. The Jazz Parser is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. The Jazz Parser is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with The Jazz Parser. If not, see <http://www.gnu.org/licenses/>. ============================ End license ====================================== """ __author__ = "Mark Granroth-Wilding <mark.granroth-wilding@ed.ac.uk>" import sys from optparse import OptionParser from jazzparser.data.parsing import ParseResults from jazzparser.parsers.cky.parser import DirectedCkyParser from jazzparser.utils.options import options_help_text, ModuleOption from jazzparser.data.tonalspace import TonalSpaceAnalysisSet from jazzparser.formalisms.music_halfspan import Formalism from jazzparser.utils.tableprint import pprint_table def main(): usage = "%prog [options] <song-set> <results-file0> [<results-file1> ...]" parser = OptionParser(usage=usage) parser.add_option("--popt", "--parser-options", dest="popts", action="append", help="specify options for the parser that interprets the gold standard annotations. Type '--popt help' to get a list of options (we use a DirectedCkyParser)") parser.add_option("-m", "--metric", dest="metric", action="store", help="semantics distance metric to use. Use '-m help' for a list of available metrics") parser.add_option("--mopt", "--metric-options", dest="mopts", action="append", help="options to pass to the semantics metric. Use with '--mopt help' with -m to see available options") parser.add_option("-r", "--print-results", dest="print_results", action="store", default=5, type="int", help="number of top search results to print for each query (parse result). Default: 5. Use -1 to print distances from all songs in the corpus") parser.add_option("-g", "--gold-only", dest="gold_only", action="store_true", help="skip results that have no gold standard sequence associated with them (we can't tell which is the right answer for these)") parser.add_option("--mc", "--metric-computation", dest="metric_computation", action="store_true", help="output the computation information for the metric between the parse result and each top search result") options, arguments = parser.parse_args() # For now, we always use the music_halfspan formalism with this script # If we wanted to make it generic, we'd just load the formalism according # to a command-line option formalism = Formalism # Process parser options if options.popts is not None: poptstr = options.popts if "help" in [s.strip().lower() for s in poptstr]: # Output this parser's option help print options_help_text(DirectedCkyParser.PARSER_OPTIONS, intro="Available options for gold standard interpreter") sys.exit(0) poptstr = ":".join(poptstr) else: poptstr = "" popts = ModuleOption.process_option_string(poptstr) # Check that the options are valid try: DirectedCkyParser.check_options(popts) except ModuleOptionError, err: logger.error("Problem with parser options (--popt): %s" % err) sys.exit(1) # Get a distance metric # Just check this, as it'll cause problems if len(formalism.semantics_distance_metrics) == 0: print "ERROR: the formalism defines no distance metrics, so this "\ "script won't work" sys.exit(1) # First get the metric if options.metric == "help": # Print out a list of metrics available print "Available distance metrics:" print ", ".join([metric.name for metric in \ formalism.semantics_distance_metrics]) sys.exit(0) if options.metric is None: # Use the first in the list as default metric_cls = formalism.semantics_distance_metrics[0] else: for m in formalism.semantics_distance_metrics: if m.name == options.metric: metric_cls = m break else: # No metric found matching this name print "No metric '%s'" % options.metric sys.exit(1) print >>sys.stderr, "Using distance metric: %s" % metric_cls.name # Now process the metric options if options.mopts is not None: moptstr = options.mopts if "help" in [s.strip().lower() for s in moptstr]: # Output this parser's option help print options_help_text(metric_cls.OPTIONS, intro="Available options for metric '%s'" % metric_cls.name) sys.exit(0) moptstr = ":".join(moptstr) else: moptstr = "" mopts = ModuleOption.process_option_string(moptstr) # Instantiate the metric with these options metric = metric_cls(options=mopts) if len(arguments) < 2: print >>sys.stderr, "Specify a song corpus name and one or more files to read results from" sys.exit(1) # First argument is an TonalSpaceAnalysisSet corpus_name = arguments[0] # Load the corpus file corpus = TonalSpaceAnalysisSet.load(corpus_name) # The rest of the args are result files to analyze res_files = arguments[1:] # Work out how many results to print out if options.print_results == -1: print_up_to = None else: print_up_to = options.print_results ranks = [] num_ranked = 0 for filename in res_files: # Load the parse results pres = ParseResults.from_file(filename) if options.gold_only and pres.gold_sequence is None: # Skip this sequence altogether if requested continue print "######################" print "Read %s" % filename # Try to get a correct answer from the PR file if pres.gold_sequence is None: print "No correct answer specified in input file" correct_song = None else: # Process the name of the sequence in the same way that # TonalSpaceAnalysisSet does # Ideally, they should make a common function call, but let's be # bad for once correct_song = pres.gold_sequence.string_name.lower() print "Correct answer: %s" % correct_song # Could have an empty result list: skip if it does if len(pres.semantics) == 0: print "No results" # Failed to get any result: if this is one of the sequences that # is in the corpus, count it as a 0 result. Otherwise, skip: # we wouldn't have counted it anyway num_ranked += 1 ranks.append(None) continue result = pres.semantics[0][1] # Compare to each of the songs distances = [] for name,songsem in corpus: # Get the distance from this song dist = metric.distance(result, songsem) distances.append((name,dist,songsem)) # Sort them to get the closest first distances.sort(key=lambda x:x[1]) print # Print out the top results, as many as requested top_results = distances[:print_up_to] table = [["","Song","Distance"]] + [ ["*" if res[0] == correct_song else "", "%s" % res[0], "%.2f" % res[1]] for res in top_results] pprint_table(sys.stdout, table, default_just=True) print if correct_song is not None: # Look for the correct answer in the results for rank,(name,distance,__) in enumerate(distances): # Match up the song name to the correct one if name == correct_song: correct_rank = rank break else: # The song name was not found in the corpus at all correct_rank = None if correct_rank is None: print "Song was not found in corpus" else: print "Correct answer got rank %d" % correct_rank # Record the ranks so we can compute the MRR ranks.append(correct_rank+1) num_ranked += 1 print if options.metric_computation: print "Explanation of top result:" print metric.print_computation(result, distances[0][2]) print if num_ranked: print "\nGot ranks for %d sequences" % num_ranked # Compute the mean reciprocal rank, the reciprocal of the harmonic mean # of the ranks of the correct answers mrr = sum([0.0 if rank is None else 1.0/rank for rank in ranks], 0.0) \ / len(ranks) print "Mean reciprocal rank: %f" % mrr if mrr > 0.0: hmr = 1.0/mrr print "Harmonic mean rank: %f" % hmr succ_ranks = [rank for rank in ranks if rank is not None] print "\nIncluding only successful parses (%d):" % len(succ_ranks) mrr_succ = sum([1.0/rank for rank in succ_ranks], 0.0) / len(succ_ranks) print "Mean reciprocal rank: %f" % mrr_succ if mrr_succ > 0.0: hmr_succ = 1.0/mrr_succ print "Harmonic mean rank: %f" % hmr_succ else: print "\nNo results to analyze" if __name__ == "__main__": main()
#!/usr/bin/env python from FIDATA import * initArgParser('Importer of predefined data', defLogFilename = 'import.log') initFIDATA() from csv import DictReader from os import path from PIL import Image classes = [] logging.info('Import of predefined data started') # Lang(FIDATA, row = row, write = True, tryGetFromDB = False) logging.info('Importing scripts') reader = DictReader(open('scripts.csv', 'r', encoding = 'UTF8'), delimiter = ';') for row in reader: Script(FIDATA, row = row, write = True, tryGetFromDB = False) del reader commit() classes += [Script] logging.info('Importing countries') reader = DictReader(open('countries.csv', 'r', encoding = 'UTF8'), delimiter = ';') for row in reader: # parent_country # associated_with if row['alpha2_code'] == '': row['alpha2_code'] = None else: flagFilename = 'flags\{:s}.png'.format(row['alpha2_code'].lower()) if path.exists(flagFilename): row['flag'] = Image.open(flagFilename) if row['gov_website'] == '': row['gov_website'] = None if row['stats_website'] == '': row['stats_website'] = None FIDATA.country(row = row, write = True, tryGetFromDB = False) del reader commit() classes += [Country] # FIDATA.issuer(row = row, write = True, tryGetFromDB = False) # row['instr_type'] = InstrumentType.Currency # FIDATA.instrument(row = row, write = True, tryGetFromDB = False) # FIDATA.instrument(row = row, write = True, tryGetFromDB = False) logging.info('Importing markets') reader = DictReader(open('markets.csv', 'r', encoding = 'UTF8'), delimiter = ';') child_markets = list() for row in reader: if row['country_alpha2_code'] == '': row['country'] = None else: row['country'] = FIDATA.country(row = { 'alpha2_code': row['country_alpha2_code'], 'name' : row['country_name'] }) if row['acronym'] == '': row['acronym'] = None if row['website'] == '': row['website'] = None if row['trade_organizer_symbol'] == '': FIDATA.market(row = row, write = True, tryGetFromDB = False) else: child_markets.append((FIDATA.market(row = row, write = False, tryGetFromDB = False), row['trade_organizer_symbol'])) del reader for (market, trade_organizer_symbol) in child_markets: market.tradeOrganizer = FIDATA.market(row = {'symbol': trade_organizer_symbol}) market.write() del child_markets commit() classes += [Market] logging.info('Importing data providers') reader = DictReader(open('data_providers.csv', 'r', encoding = 'UTF8'), delimiter = ';') for row in reader: if row['trade_organizer_symbol'] == '': row['trade_organizer'] = None else: row['trade_organizer'] = FIDATA.market(row = {'symbol': row['trade_organizer_symbol']}) FIDATA.dataProvider(row = row, write = True, tryGetFromDB = False) del reader commit() classes += [DataProvider] logging.info('Import of predefined data finished') FIDATA.analyze(classes)
""" SleekXMPP: The Sleek XMPP Library Copyright (C) 2011 Nathanael C. Fritz This file is part of SleekXMPP. See the file LICENSE for copying permission. """ import logging from sleekxmpp.xmlstream import JID from sleekxmpp.xmlstream.handler import Callback from sleekxmpp.xmlstream.matcher import StanzaPath from sleekxmpp.plugins.base import BasePlugin from sleekxmpp.plugins.xep_0060 import stanza log = logging.getLogger(__name__) class XEP_0060(BasePlugin): """ XEP-0060 Publish Subscribe """ name = 'xep_0060' description = 'XEP-0060: Publish-Subscribe' dependencies = set(['xep_0030', 'xep_0004']) stanza = stanza def plugin_init(self): self.node_event_map = {} self.xmpp.register_handler( Callback('Pubsub Event: Items', StanzaPath('message/pubsub_event/items'), self._handle_event_items)) self.xmpp.register_handler( Callback('Pubsub Event: Purge', StanzaPath('message/pubsub_event/purge'), self._handle_event_purge)) self.xmpp.register_handler( Callback('Pubsub Event: Delete', StanzaPath('message/pubsub_event/delete'), self._handle_event_delete)) self.xmpp.register_handler( Callback('Pubsub Event: Configuration', StanzaPath('message/pubsub_event/configuration'), self._handle_event_configuration)) self.xmpp.register_handler( Callback('Pubsub Event: Subscription', StanzaPath('message/pubsub_event/subscription'), self._handle_event_subscription)) def plugin_end(self): self.xmpp.remove_handler('Pubsub Event: Items') self.xmpp.remove_handler('Pubsub Event: Purge') self.xmpp.remove_handler('Pubsub Event: Delete') self.xmpp.remove_handler('Pubsub Event: Configuration') self.xmpp.remove_handler('Pubsub Event: Subscription') def _handle_event_items(self, msg): """Raise events for publish and retraction notifications.""" node = msg['pubsub_event']['items']['node'] multi = len(msg['pubsub_event']['items']) > 1 values = {} if multi: values = msg.values del values['pubsub_event'] for item in msg['pubsub_event']['items']: event_name = self.node_event_map.get(node, None) event_type = 'publish' if item.name == 'retract': event_type = 'retract' if multi: condensed = self.xmpp.Message() condensed.values = values condensed['pubsub_event']['items']['node'] = node condensed['pubsub_event']['items'].append(item) self.xmpp.event('pubsub_%s' % event_type, msg) if event_name: self.xmpp.event('%s_%s' % (event_name, event_type), condensed) else: self.xmpp.event('pubsub_%s' % event_type, msg) if event_name: self.xmpp.event('%s_%s' % (event_name, event_type), msg) def _handle_event_purge(self, msg): """Raise events for node purge notifications.""" node = msg['pubsub_event']['purge']['node'] event_name = self.node_event_map.get(node, None) self.xmpp.event('pubsub_purge', msg) if event_name: self.xmpp.event('%s_purge' % event_name, msg) def _handle_event_delete(self, msg): """Raise events for node deletion notifications.""" node = msg['pubsub_event']['delete']['node'] event_name = self.node_event_map.get(node, None) self.xmpp.event('pubsub_delete', msg) if event_name: self.xmpp.event('%s_delete' % event_name, msg) def _handle_event_configuration(self, msg): """Raise events for node configuration notifications.""" node = msg['pubsub_event']['configuration']['node'] event_name = self.node_event_map.get(node, None) self.xmpp.event('pubsub_config', msg) if event_name: self.xmpp.event('%s_config' % event_name, msg) def _handle_event_subscription(self, msg): """Raise events for node subscription notifications.""" node = msg['pubsub_event']['subscription']['node'] event_name = self.node_event_map.get(node, None) self.xmpp.event('pubsub_subscription', msg) if event_name: self.xmpp.event('%s_subscription' % event_name, msg) def map_node_event(self, node, event_name): """ Map node names to events. When a pubsub event is received for the given node, raise the provided event. For example:: map_node_event('http://jabber.org/protocol/tune', 'user_tune') will produce the events 'user_tune_publish' and 'user_tune_retract' when the respective notifications are received from the node 'http://jabber.org/protocol/tune', among other events. Arguments: node -- The node name to map to an event. event_name -- The name of the event to raise when a notification from the given node is received. """ self.node_event_map[node] = event_name def create_node(self, jid, node, config=None, ntype=None, ifrom=None, block=True, callback=None, timeout=None): """ Create and configure a new pubsub node. A server MAY use a different name for the node than the one provided, so be sure to check the result stanza for a server assigned name. If no configuration form is provided, the node will be created using the server's default configuration. To get the default configuration use get_node_config(). Arguments: jid -- The JID of the pubsub service. node -- Optional name of the node to create. If no name is provided, the server MAY generate a node ID for you. The server can also assign a different name than the one you provide; check the result stanza to see if the server assigned a name. config -- Optional XEP-0004 data form of configuration settings. ntype -- The type of node to create. Servers typically default to using 'leaf' if no type is provided. ifrom -- Specify the sender's JID. block -- Specify if the send call will block until a response is received, or a timeout occurs. Defaults to True. timeout -- The length of time (in seconds) to wait for a response before exiting the send call if blocking is used. Defaults to sleekxmpp.xmlstream.RESPONSE_TIMEOUT callback -- Optional reference to a stream handler function. Will be executed when a reply stanza is received. """ iq = self.xmpp.Iq(sto=jid, sfrom=ifrom, stype='set') iq['pubsub']['create']['node'] = node if config is not None: form_type = 'http://jabber.org/protocol/pubsub#node_config' if 'FORM_TYPE' in config['fields']: config.field['FORM_TYPE']['value'] = form_type else: config.add_field(var='FORM_TYPE', ftype='hidden', value=form_type) if ntype: if 'pubsub#node_type' in config['fields']: config.field['pubsub#node_type']['value'] = ntype else: config.add_field(var='pubsub#node_type', value=ntype) iq['pubsub']['configure'].append(config) return iq.send(block=block, callback=callback, timeout=timeout) def subscribe(self, jid, node, bare=True, subscribee=None, options=None, ifrom=None, block=True, callback=None, timeout=None): """ Subscribe to updates from a pubsub node. The rules for determining the JID that is subscribing to the node are: 1. If subscribee is given, use that as provided. 2. If ifrom was given, use the bare or full version based on bare. 3. Otherwise, use self.xmpp.boundjid based on bare. Arguments: jid -- The pubsub service JID. node -- The node to subscribe to. bare -- Indicates if the subscribee is a bare or full JID. Defaults to True for a bare JID. subscribee -- The JID that is subscribing to the node. options -- ifrom -- Specify the sender's JID. block -- Specify if the send call will block until a response is received, or a timeout occurs. Defaults to True. timeout -- The length of time (in seconds) to wait for a response before exiting the send call if blocking is used. Defaults to sleekxmpp.xmlstream.RESPONSE_TIMEOUT callback -- Optional reference to a stream handler function. Will be executed when a reply stanza is received. """ iq = self.xmpp.Iq(sto=jid, sfrom=ifrom, stype='set') iq['pubsub']['subscribe']['node'] = node if subscribee is None: if ifrom: if bare: subscribee = JID(ifrom).bare else: subscribee = ifrom else: if bare: subscribee = self.xmpp.boundjid.bare else: subscribee = self.xmpp.boundjid iq['pubsub']['subscribe']['jid'] = subscribee if options is not None: iq['pubsub']['options'].append(options) return iq.send(block=block, callback=callback, timeout=timeout) def unsubscribe(self, jid, node, subid=None, bare=True, subscribee=None, ifrom=None, block=True, callback=None, timeout=None): """ Unubscribe from updates from a pubsub node. The rules for determining the JID that is unsubscribing from the node are: 1. If subscribee is given, use that as provided. 2. If ifrom was given, use the bare or full version based on bare. 3. Otherwise, use self.xmpp.boundjid based on bare. Arguments: jid -- The pubsub service JID. node -- The node to subscribe to. subid -- The specific subscription, if multiple subscriptions exist for this JID/node combination. bare -- Indicates if the subscribee is a bare or full JID. Defaults to True for a bare JID. subscribee -- The JID that is subscribing to the node. ifrom -- Specify the sender's JID. block -- Specify if the send call will block until a response is received, or a timeout occurs. Defaults to True. timeout -- The length of time (in seconds) to wait for a response before exiting the send call if blocking is used. Defaults to sleekxmpp.xmlstream.RESPONSE_TIMEOUT callback -- Optional reference to a stream handler function. Will be executed when a reply stanza is received. """ iq = self.xmpp.Iq(sto=jid, sfrom=ifrom, stype='set') iq['pubsub']['unsubscribe']['node'] = node if subscribee is None: if ifrom: if bare: subscribee = JID(ifrom).bare else: subscribee = ifrom else: if bare: subscribee = self.xmpp.boundjid.bare else: subscribee = self.xmpp.boundjid iq['pubsub']['unsubscribe']['jid'] = subscribee iq['pubsub']['unsubscribe']['subid'] = subid return iq.send(block=block, callback=callback, timeout=timeout) def get_subscriptions(self, jid, node=None, ifrom=None, block=True, callback=None, timeout=None): iq = self.xmpp.Iq(sto=jid, sfrom=ifrom, stype='get') iq['pubsub']['subscriptions']['node'] = node return iq.send(block=block, callback=callback, timeout=timeout) def get_affiliations(self, jid, node=None, ifrom=None, block=True, callback=None, timeout=None): iq = self.xmpp.Iq(sto=jid, sfrom=ifrom, stype='get') iq['pubsub']['affiliations']['node'] = node return iq.send(block=block, callback=callback, timeout=timeout) def get_subscription_options(self, jid, node=None, user_jid=None, ifrom=None, block=True, callback=None, timeout=None): iq = self.xmpp.Iq(sto=jid, sfrom=ifrom, stype='get') if user_jid is None: iq['pubsub']['default']['node'] = node else: iq['pubsub']['options']['node'] = node iq['pubsub']['options']['jid'] = user_jid return iq.send(block=block, callback=callback, timeout=timeout) def set_subscription_options(self, jid, node, user_jid, options, ifrom=None, block=True, callback=None, timeout=None): iq = self.xmpp.Iq(sto=jid, sfrom=ifrom, stype='get') iq['pubsub']['options']['node'] = node iq['pubsub']['options']['jid'] = user_jid iq['pubsub']['options'].append(options) return iq.send(block=block, callback=callback, timeout=timeout) def get_node_config(self, jid, node=None, ifrom=None, block=True, callback=None, timeout=None): """ Retrieve the configuration for a node, or the pubsub service's default configuration for new nodes. Arguments: jid -- The JID of the pubsub service. node -- The node to retrieve the configuration for. If None, the default configuration for new nodes will be requested. Defaults to None. ifrom -- Specify the sender's JID. block -- Specify if the send call will block until a response is received, or a timeout occurs. Defaults to True. timeout -- The length of time (in seconds) to wait for a response before exiting the send call if blocking is used. Defaults to sleekxmpp.xmlstream.RESPONSE_TIMEOUT callback -- Optional reference to a stream handler function. Will be executed when a reply stanza is received. """ iq = self.xmpp.Iq(sto=jid, sfrom=ifrom, stype='get') if node is None: iq['pubsub_owner']['default'] else: iq['pubsub_owner']['configure']['node'] = node return iq.send(block=block, callback=callback, timeout=timeout) def get_node_subscriptions(self, jid, node, ifrom=None, block=True, callback=None, timeout=None): """ Retrieve the subscriptions associated with a given node. Arguments: jid -- The JID of the pubsub service. node -- The node to retrieve subscriptions from. ifrom -- Specify the sender's JID. block -- Specify if the send call will block until a response is received, or a timeout occurs. Defaults to True. timeout -- The length of time (in seconds) to wait for a response before exiting the send call if blocking is used. Defaults to sleekxmpp.xmlstream.RESPONSE_TIMEOUT callback -- Optional reference to a stream handler function. Will be executed when a reply stanza is received. """ iq = self.xmpp.Iq(sto=jid, sfrom=ifrom, stype='get') iq['pubsub_owner']['subscriptions']['node'] = node return iq.send(block=block, callback=callback, timeout=timeout) def get_node_affiliations(self, jid, node, ifrom=None, block=True, callback=None, timeout=None): """ Retrieve the affiliations associated with a given node. Arguments: jid -- The JID of the pubsub service. node -- The node to retrieve affiliations from. ifrom -- Specify the sender's JID. block -- Specify if the send call will block until a response is received, or a timeout occurs. Defaults to True. timeout -- The length of time (in seconds) to wait for a response before exiting the send call if blocking is used. Defaults to sleekxmpp.xmlstream.RESPONSE_TIMEOUT callback -- Optional reference to a stream handler function. Will be executed when a reply stanza is received. """ iq = self.xmpp.Iq(sto=jid, sfrom=ifrom, stype='get') iq['pubsub_owner']['affiliations']['node'] = node return iq.send(block=block, callback=callback, timeout=timeout) def delete_node(self, jid, node, ifrom=None, block=True, callback=None, timeout=None): """ Delete a a pubsub node. Arguments: jid -- The JID of the pubsub service. node -- The node to delete. ifrom -- Specify the sender's JID. block -- Specify if the send call will block until a response is received, or a timeout occurs. Defaults to True. timeout -- The length of time (in seconds) to wait for a response before exiting the send call if blocking is used. Defaults to sleekxmpp.xmlstream.RESPONSE_TIMEOUT callback -- Optional reference to a stream handler function. Will be executed when a reply stanza is received. """ iq = self.xmpp.Iq(sto=jid, sfrom=ifrom, stype='set') iq['pubsub_owner']['delete']['node'] = node return iq.send(block=block, callback=callback, timeout=timeout) def set_node_config(self, jid, node, config, ifrom=None, block=True, callback=None, timeout=None): iq = self.xmpp.Iq(sto=jid, sfrom=ifrom, stype='set') iq['pubsub_owner']['configure']['node'] = node iq['pubsub_owner']['configure']['form'].values = config.values return iq.send(block=block, callback=callback, timeout=timeout) def publish(self, jid, node, id=None, payload=None, options=None, ifrom=None, block=True, callback=None, timeout=None): """ Add a new item to a node, or edit an existing item. For services that support it, you can use the publish command as an event signal by not including an ID or payload. When including a payload and you do not provide an ID then the service will generally create an ID for you. Publish options may be specified, and how those options are processed is left to the service, such as treating the options as preconditions that the node's settings must match. Arguments: jid -- The JID of the pubsub service. node -- The node to publish the item to. id -- Optionally specify the ID of the item. payload -- The item content to publish. options -- A form of publish options. ifrom -- Specify the sender's JID. block -- Specify if the send call will block until a response is received, or a timeout occurs. Defaults to True. timeout -- The length of time (in seconds) to wait for a response before exiting the send call if blocking is used. Defaults to sleekxmpp.xmlstream.RESPONSE_TIMEOUT callback -- Optional reference to a stream handler function. Will be executed when a reply stanza is received. """ iq = self.xmpp.Iq(sto=jid, sfrom=ifrom, stype='set') iq['pubsub']['publish']['node'] = node if id is not None: iq['pubsub']['publish']['item']['id'] = id if payload is not None: iq['pubsub']['publish']['item']['payload'] = payload iq['pubsub']['publish_options'] = options return iq.send(block=block, callback=callback, timeout=timeout) def retract(self, jid, node, id, notify=None, ifrom=None, block=True, callback=None, timeout=None): """ Delete a single item from a node. """ iq = self.xmpp.Iq(sto=jid, sfrom=ifrom, stype='set') iq['pubsub']['retract']['node'] = node iq['pubsub']['retract']['notify'] = notify iq['pubsub']['retract']['item']['id'] = id return iq.send(block=block, callback=callback, timeout=timeout) def purge(self, jid, node, ifrom=None, block=True, callback=None, timeout=None): """ Remove all items from a node. """ iq = self.xmpp.Iq(sto=jid, sfrom=ifrom, stype='set') iq['pubsub_owner']['purge']['node'] = node return iq.send(block=block, callback=callback, timeout=timeout) def get_nodes(self, *args, **kwargs): """ Discover the nodes provided by a Pubsub service, using disco. """ return self.xmpp['xep_0030'].get_items(*args, **kwargs) def get_item(self, jid, node, item_id, ifrom=None, block=True, callback=None, timeout=None): """ Retrieve the content of an individual item. """ iq = self.xmpp.Iq(sto=jid, sfrom=ifrom, stype='get') item = stanza.Item() item['id'] = item_id iq['pubsub']['items']['node'] = node iq['pubsub']['items'].append(item) return iq.send(block=block, callback=callback, timeout=timeout) def get_items(self, jid, node, item_ids=None, max_items=None, iterator=False, ifrom=None, block=False, callback=None, timeout=None): """ Request the contents of a node's items. The desired items can be specified, or a query for the last few published items can be used. Pubsub services may use result set management for nodes with many items, so an iterator can be returned if needed. """ iq = self.xmpp.Iq(sto=jid, sfrom=ifrom, stype='get') iq['pubsub']['items']['node'] = node iq['pubsub']['items']['max_items'] = max_items if item_ids is not None: for item_id in item_ids: item = stanza.Item() item['id'] = item_id iq['pubsub']['items'].append(item) if iterator: return self.xmpp['xep_0059'].iterate(iq, 'pubsub') else: return iq.send(block=block, callback=callback, timeout=timeout) def get_item_ids(self, jid, node, ifrom=None, block=True, callback=None, timeout=None, iterator=False): """ Retrieve the ItemIDs hosted by a given node, using disco. """ return self.xmpp['xep_0030'].get_items(jid, node, ifrom=ifrom, block=block, callback=callback, timeout=timeout, iterator=iterator) def modify_affiliations(self, jid, node, affiliations=None, ifrom=None, block=True, callback=None, timeout=None): iq = self.xmpp.Iq(sto=jid, sfrom=ifrom, stype='set') iq['pubsub_owner']['affiliations']['node'] = node if affiliations is None: affiliations = [] for jid, affiliation in affiliations: aff = stanza.OwnerAffiliation() aff['jid'] = jid aff['affiliation'] = affiliation iq['pubsub_owner']['affiliations'].append(aff) return iq.send(block=block, callback=callback, timeout=timeout) def modify_subscriptions(self, jid, node, subscriptions=None, ifrom=None, block=True, callback=None, timeout=None): iq = self.xmpp.Iq(sto=jid, sfrom=ifrom, stype='set') iq['pubsub_owner']['subscriptions']['node'] = node if subscriptions is None: subscriptions = [] for jid, subscription in subscriptions: sub = stanza.OwnerSubscription() sub['jid'] = jid sub['subscription'] = subscription iq['pubsub_owner']['subscriptions'].append(sub) return iq.send(block=block, callback=callback, timeout=timeout)
import re import sys from argparse import ArgumentParser from gi.repository import Gtk from gi.repository import Gdk from gi.repository import Gio from gi.repository import GLib from gi.repository import Soup from redditisgtk.sublist import SubList from redditisgtk.subentry import SubEntry from redditisgtk.api import RedditAPI, APIFactory from redditisgtk.webviews import (FullscreenableWebview, ProgressContainer, WebviewToolbar) from redditisgtk.readcontroller import get_read_controller from redditisgtk.identity import IdentityController from redditisgtk.identitybutton import IdentityButton from redditisgtk.comments import CommentsView from redditisgtk.settings import get_settings, show_settings from redditisgtk import webviews VIEW_WEB = 0 VIEW_COMMENTS = 1 class RedditWindow(Gtk.Window): def __init__( self, ic: IdentityController, api_factory: APIFactory, start_sub: str = None): Gtk.Window.__init__(self, title='Something For Reddit', icon_name='today.sam.reddit-is-gtk') self.add_events(Gdk.EventMask.KEY_PRESS_MASK) self.set_default_size(600, 600) self.set_wmclass("reddit-is-gtk", "Something For Reddit") self._ic = ic self._ic.token_changed.connect(self._token_changed_cb) self._api = None self._api_factory = api_factory settings = Gtk.Settings.get_default() screen = Gdk.Screen.get_default() css_provider = Gtk.CssProvider.get_default() if settings.props.gtk_application_prefer_dark_theme: css_provider.load_from_resource( '/today/sam/reddit-is-gtk/style.dark.css') else: css_provider.load_from_resource( '/today/sam/reddit-is-gtk/style.css') context = Gtk.StyleContext() context.add_provider_for_screen(screen, css_provider, Gtk.STYLE_PROVIDER_PRIORITY_USER) self._paned = Gtk.Paned.new(Gtk.Orientation.HORIZONTAL) self.add(self._paned) self._paned.show() self._webview = FullscreenableWebview() self._webview_bin = ProgressContainer(self._webview) self._comments = None self._stack = Gtk.Stack() self._stack.connect('notify::visible-child', self.__stack_child_cb) self._paned.add2(self._stack) #self._paned.child_set_property(self._stack, 'shrink', True) self._stack.show() self._sublist_bin = Gtk.Box() self._paned.add1(self._sublist_bin) self._sublist_bin.show() self._sublist = None self._make_header() left = Gtk.SizeGroup(mode=Gtk.SizeGroupMode.HORIZONTAL) left.add_widget(self._left_header) left.add_widget(self._sublist_bin) self._paned.connect('notify::position', self.__notify_position_cb, self._header_paned) self._header_paned.connect('notify::position', self.__notify_position_cb, self._paned) self._token_changed_cb(self._ic) def _token_changed_cb(self, ic): api = self._api_factory.get_for_token(self._ic.active_token) if self._api != api: self.connect_api(api) def connect_api(self, api: RedditAPI): start_sub = None if start_sub is None: start_sub = get_settings()['default-sub'] if self._api is not None: # TODO: swap right panel print('Swapping', self._api, 'for', api) start_sub = self._sublist.get_uri() # FIXME: do we need to disconnect the callbacks? self._sublist.destroy() self._subentry.destroy() self._api = api self._api.request_failed.connect(self.__request_failed_cb) self._sublist = SubList(self._api, start_sub) self._sublist.new_other_pane.connect(self.__new_other_pane_cb) self._sublist_bin.add(self._sublist) #self._paned.child_set_property(self._sublist, 'shrink', True) self._sublist.show() self._subentry = SubEntry(self._api, start_sub) self._subentry.activate.connect(self.__subentry_activate_cb) self._subentry.escape_me.connect(self.__subentry_escape_me_cb) self._left_header.props.custom_title = self._subentry self._subentry.show() def __request_failed_cb(self, api, msg, info): dialog = Gtk.Dialog(use_header_bar=True) label = Gtk.Label(label=info) dialog.get_content_area().add(label) label.show() dialog.add_button('Retry', Gtk.ResponseType.ACCEPT) dialog.add_button(':shrug-shoulders:', Gtk.ResponseType.REJECT) dialog.set_default_response(Gtk.ResponseType.ACCEPT) dialog.props.transient_for = self response = dialog.run() if response == Gtk.ResponseType.ACCEPT: self._api.resend_message(msg) dialog.destroy() def do_event(self, event): if event.type != Gdk.EventType.KEY_PRESS: return if isinstance(self.get_focus(), (Gtk.TextView, Gtk.Entry)): return if event.keyval == Gdk.KEY_F6: self._subentry.focus() return True if event.keyval == Gdk.KEY_1: self._sublist.focus() return True if event.keyval == Gdk.KEY_2: self._stack.set_visible_child(self._comments) self._comments.focus() return True if event.keyval == Gdk.KEY_3: self._stack.set_visible_child(self._webview_bin) self._webview.grab_focus() return True if event.state & Gdk.ModifierType.MOD1_MASK: if event.keyval == Gdk.KEY_Left: self._webview.go_back() return True if event.keyval == Gdk.KEY_Right: self._webview.go_forward() return True def __new_other_pane_cb(self, sublist, link, comments, link_first): if self._comments is not None: self._stack.remove(self._comments) self._stack.remove(self._webview_bin) self._comments = comments if self._comments is not None: self._stack.add_titled(self._comments, 'comments', 'Comments') self._comments.show() self._stack.add_titled(self._webview_bin, 'web', 'Web') self._webview_bin.show() self._webview.show() self._paned.position = 400 # TODO: constant if link_first and link: self._stack.set_visible_child(self._webview_bin) self._webview.load_uri(link) else: self._stack.set_visible_child(self._comments) if link is not None: self._webview.load_when_visible(link) def load_uri_from_label(self, uri): is_relative = not uri.startswith('http') is_reddit = re.match('https?:\/\/(www\.|np\.)?reddit\.com\/', uri) if is_relative or is_reddit: self.goto_reddit_uri(uri) return self._stack.set_visible_child(self._webview_bin) self._webview.load_uri(uri) def __notify_position_cb(self, caller, pspec, other): other.props.position = caller.props.position def _make_header(self): self._header_paned = Gtk.Paned() self.set_titlebar(self._header_paned) self._left_header = Gtk.HeaderBar() layout = Gtk.Settings.get_default().props.gtk_decoration_layout self._left_header.set_decoration_layout(layout.split(':')[0]) self._right_header = Gtk.HeaderBar() self._right_header.set_decoration_layout(':'+layout.split(':')[1]) self._right_header.props.show_close_button = True self._header_paned.add1(self._left_header) self._header_paned.child_set_property( self._left_header, 'shrink', False) self._header_paned.add2(self._right_header) self._header_paned.child_set_property( self._right_header, 'shrink', False) self._header_paned.show_all() self._identity = IdentityButton(self._ic) self._right_header.pack_start(self._identity) self._identity.show() self._stack_switcher = Gtk.StackSwitcher(stack=self._stack) self._right_header.pack_end(self._stack_switcher) self._stack_switcher.show() self._webview_toolbar = WebviewToolbar(self._webview) self._right_header.pack_end(self._webview_toolbar) def __stack_child_cb(self, stack, pspec): self._webview_toolbar.props.visible = \ stack.props.visible_child == self._webview_bin def get_sublist(self): return self._sublist def get_comments_view(self): return self._comments def goto_sublist(self, to): ''' Public api for children: widget.get_toplevel().goto_sublist('/u/samdroid_/overview') ''' self._sublist.goto(to) self._subentry.goto(to) def goto_reddit_uri(self, uri): ''' Go to a reddit.com uri, eg. "https://www.reddit.com/r/rct" ''' for cond in ['https://', 'http://', 'www.', 'np.', 'reddit.com']: if uri.startswith(cond): uri = uri[len(cond):] # Disregard the '' before the leading / parts = uri.split('/')[1:] if len(parts) <= 3: # /u/*/*, /r/*, /r/*/*(sorting) self.goto_sublist(uri) elif parts[2] == 'comments': self.goto_sublist('/r/{}/'.format(parts[1])) cv = CommentsView(self._api, permalink=uri) cv.got_post_data.connect(self.__cv_got_post_data_cb) self.__new_other_pane_cb(None, None, cv, False) def __cv_got_post_data_cb(self, cv, post): if not post.get('is_self') and 'url' in post: self.__new_other_pane_cb(None, post['url'], cv, True) def __subentry_activate_cb(self, entry, sub): self._sublist.goto(sub) self._sublist.focus() def __subentry_escape_me_cb(self, entry): self._sublist.focus() class Application(Gtk.Application): def __init__(self, ic: IdentityController, api_factory: APIFactory): Gtk.Application.__init__(self, application_id='today.sam.reddit-is-gtk') self.connect('startup', self.__do_startup_cb) GLib.set_application_name("Something For Reddit") GLib.set_prgname("reddit-is-gtk") self._w = None self._queue_uri = None self._ic = ic self._api_factory = api_factory def do_activate(self): self._w = RedditWindow(self._ic, self._api_factory) self.add_window(self._w) self._w.show() if self._queue_uri is not None: self._w.goto_reddit_uri(self._queue_uri) self._queue_uri = None def goto_reddit_uri(self, uri): if self._w is None: self._queue_uri = uri else: self._w.goto_reddit_uri(uri) # TODO: Using do_startup causes SIGSEGV for me def __do_startup_cb(self, app): actions = [('about', self.__about_cb), ('quit', self.__quit_cb), ('issues', self.__issues_cb), ('shortcuts', self.__shortcuts_cb), ('settings', self.__settings_cb)] for name, cb in actions: a = Gio.SimpleAction.new(name, None) a.connect('activate', cb) self.add_action(a) builder = Gtk.Builder.new_from_resource( '/today/sam/reddit-is-gtk/app-menu.ui') self._menu = builder.get_object('app-menu') self.props.app_menu = self._menu def __about_cb(self, action, param): about_dialog = Gtk.AboutDialog( program_name='Something for Reddit', comments=('A simple but powerful Reddit client, built for GNOME ' 'powered by Gtk+ 3.0'), license_type=Gtk.License.GPL_3_0, logo_icon_name='today.sam.reddit-is-gtk', authors=['Sam P. <sam@sam.today>'], website='https://github.com/samdroid-apps/something-for-reddit', website_label='Git Repo and Issue Tracker on GitHub', # VERSION: version='0.2.2 - “The Bugfix Release ⓇⒺⒹⓊⓍ”', transient_for=self._w, modal=True) about_dialog.present() def __issues_cb(self, action, param): webviews.open_uri_external( 'https://github.com/samdroid-apps/something-for-reddit/issues') def __quit_cb(self, action, param): self.quit() def __shortcuts_cb(self, action, param): builder = Gtk.Builder.new_from_resource( '/today/sam/reddit-is-gtk/shortcuts-window.ui') builder.get_object('window').show() def __settings_cb(self, action, param): show_settings() def run(): parser = ArgumentParser( description='Something For Reddit - a Gtk+ Reddit Client') parser.add_argument('uri', help='Reddit.com URI to open, or None', default=None, nargs='?') parser.add_argument('--dark', help='Force Gtk+ dark theme', action='store_true') args = parser.parse_args() settings = Gtk.Settings.get_default() theme = get_settings()['theme'] if theme == 'dark': settings.props.gtk_application_prefer_dark_theme = True elif theme == 'light': settings.props.gtk_application_prefer_dark_theme = False if args.dark: settings.props.gtk_application_prefer_dark_theme = True session = Soup.Session() ic = IdentityController(session) api_factory = APIFactory(session) a = Application(ic, api_factory) if args.uri is not None: a.goto_reddit_uri(args.uri) status = a.run() get_read_controller().save() sys.exit(status)
"""XBeeModem.py bypasses the XBee's 802.15.4 capabilities and simply uses it modem for communications You don't have to master 802.15.4 and a large set of XBee commands to make a very simple but potentially useful network. At its core, the XBee radio is a modem and you can use it directly for simple serial communications. Reference Materials: Non-blocking read from stdin in python - http://repolinux.wordpress.com/2012/10/09/non-blocking-read-from-stdin-in-python/ Non-blocking read on a subprocess.PIPE in python - http://stackoverflow.com/questions/375427/non-blocking-read-on-a-subprocess-pipe-in-python Originally Created By: Jeff Irland (jeff.irland@gmail.com) in March 2013 """ import sys # provides access to some variables used or maintained by the interpreter import time # provides various time-related functions from serial import Serial from webiopi.clients import PiHttpClient, Macro from webiopi.utils.logger import exception, setDebug, info, debug, logToFile from webiopi.utils.thread import stop VERSION = '1.0' def displayHelp(): print("Xbee command-line usage") print("xbee [-h] [-c config] [-l log] [-d] [port]") print("") print("Options:") print(" -h, --help Display this help") print(" -c, --config file Load config from file") print(" -l, --log file Log to file") print(" -d, --debug Enable DEBUG") print("") print("Arguments:") print(" port WebIOPi port") exit() def main(argv): port = 8000 configfile = None logfile = None i = 1 while i < len(argv): if argv[i] in ["-c", "-C", "--config-file"]: configfile = argv[i+1] i+=1 elif argv[i] in ["-l", "-L", "--log-file"]: logfile = argv[i+1] i+=1 elif argv[i] in ["-h", "-H", "--help"]: displayHelp() elif argv[i] in ["-d", "--debug"]: setDebug() else: try: port = int(argv[i]) except ValueError: displayHelp() i+=1 if logfile: logToFile(logfile) info("Starting XBee %s" % VERSION) # setup serial serial = Serial() serial.port = '/dev/ttyAMA0' serial.baudrate = 9600 serial.timeout = 1 serial.writeTimeout = 1 serial.open() # disregard any pending data in xbee buffer serial.flushInput() # force to show xbee boot menu time.sleep(.5) serial.writelines("\r") time.sleep(.5) # read menu while serial.inWaiting() > 0: debug("%s" % serial.readline()) # trigger bypass automatically serial.writelines("B") # post startup message to other XBee's and at stdout #serial.writelines("RPi #1 is up and running.\r\n") info("RPi #1 is up and running.") try: while True: waitToSend = True # read a line from XBee and convert it from b'xxx\r\n' to xxx and send to webiopi while serial.inWaiting() > 0: try: line = serial.readline().decode('utf-8').strip('\n\r') if line: waitToSend = False debug("Received: %s" % line) try: client = PiHttpClient("127.0.0.1") macro = Macro(client, "setCarInfo") macro.call(line.replace(",", "%2C")) except: exception("setting car info failed!") except KeyboardInterrupt: raise except Exception as e: exception(e) time.sleep(1.) try: time.sleep(1.) client = PiHttpClient("127.0.0.1") macro = Macro(client, "getPitInfo") data = macro.call() if data: debug("Sending: %s" % data) serial.writelines(data + "\n") except KeyboardInterrupt: raise except Exception as e: exception(e) time.sleep(1.) except KeyboardInterrupt: info("*** Ctrl-C keyboard interrupt ***") if __name__ == "__main__": try: main(sys.argv) except Exception as e: exception(e) stop() info("RPi #1 is going down")
from random import random, randint from PIL import Image, ImageDraw, ImageFont import perlin def draw_background(setup) : canvas = setup['canvas'] image = Image.new('RGBA', canvas, tuple(setup['color']['back'])) background = Image.new('RGBA', canvas, (0,0,0,0)) draw = ImageDraw.Draw(background) stars = [[ int(p * random()) for p in canvas ] for x in range(400) ] scale = lambda x, r : x + r * (min(canvas) / 320) color = (255, 255, 255, 100) for x, y in stars : r = random() draw.ellipse([x, y, scale(x, r), scale(y, r)], fill=color) return Image.alpha_composite(image, background) def apply_noise(image, setup) : generator = perlin.Perlin() octaves = 5 persistence = 5 coef = 30 width, height = setup['canvas'][0], setup['canvas'][1] list_of_pixels = list(image.getdata()) for i, pixel in enumerate(list_of_pixels) : if pixel != (0, 0, 0, 0) : noise = generator.OctavePerlin((i % width) / coef, i / (height * coef), 0, 1, 5) new_pixel = [ int(x * (1 + noise)) for x in pixel[:3] ] new_pixel.append(pixel[3]) list_of_pixels[i] = tuple(new_pixel) image = Image.new(image.mode, image.size) image.putdata(list_of_pixels) return image def apply_ray_effect(sun_image, setup) : canvas = setup['canvas'] width, height = setup['canvas'][0], setup['canvas'][1] decay = 0.8 density = 1.2 samples = 128 center = [ x / 2 for x in setup['canvas'] ] list_of_pixels = list(sun_image.getdata()) new_image = [] print("starting postprocessing...") for y in range(height) : print("\rjob completed {0:.2f}%".format(round(100 * (y / height), 2)), flush=True, end="") for x in range(width) : tc = [x, y] delta = [ (x - center[0]) / (samples * density), (y - center[1]) / (samples * density) ] color = list_of_pixels[x + y * width] illumination = 1 for m in range(samples) : tc = [ tc[0] - delta[0], tc[1] - delta[1]] add_color = tuple( illumination * x for x in list_of_pixels[int(tc[0]) + int(tc[1]) * width] ) illumination *= decay color = tuple( x + y for x, y in zip(color, add_color)) new_image.append(tuple(int(x) for x in color)) image = Image.new(sun_image.mode, sun_image.size) image.putdata(new_image) return image def draw_sun(image, setup) : canvas = setup['canvas'] sun_image = Image.new('RGBA', canvas, (0,0,0,0)) draw = ImageDraw.Draw(sun_image) draw.ellipse(setup['sun'], fill=tuple(setup['color']['base'])) sun_image = apply_noise(sun_image, setup) sun_image = apply_ray_effect(sun_image, setup) return Image.alpha_composite(image, sun_image) def create_sun(setup) : canvas, size = setup['canvas'], setup['size'] d = min([x * 0.08 * 5 * size for x in canvas]) planet = [ (x - d) / 2 for x in canvas ] planet.append(planet[0] + d) planet.append(planet[1] + d) setup['sun'] = planet setup['diam'] = d setup['rad'] = d / 2 setup['center'] = [ planet[0] + d / 2, planet[1] + d / 2 ] def sun_setup(setup) : tmp_setup = {} tmp_setup['color'] = {} tmp_setup['color']['base'] = setup[2] tmp_setup['color']['back'] = [ int(x * 0.05) for x in setup[2] ] tmp_setup['canvas'] = [ x * 2 for x in setup[0] ] tmp_setup['size'] = setup[1] / (255 * 2) return tmp_setup def sun(setup) : setup = sun_setup(setup) create_sun(setup) image = draw_background(setup) image = draw_sun(image, setup) canvas = [ int(x / 2) for x in setup['canvas'] ] resized = image.resize(canvas, Image.ANTIALIAS) resized.save("test.png") setup = ((1200, 750), 128, (180, 120, 100)) sun(setup)
#!/usr/bin/python import pygame import math import random import sys import PixelPerfect from pygame.locals import * from water import Water from menu import Menu from game import Game from highscores import Highscores from options import Options import util from locals import * import health import cloud import mine import steamboat import pirateboat import shark import seagull def init(): health.init() steamboat.init() shark.init() pirateboat.init() cloud.init() mine.init() seagull.init() def main(): global SCREEN_FULLSCREEN pygame.init() util.load_config() if len(sys.argv) > 1: for arg in sys.argv: if arg == "-np": Variables.particles = False elif arg == "-na": Variables.alpha = False elif arg == "-nm": Variables.music = False elif arg == "-ns": Variables.sound = False elif arg == "-f": SCREEN_FULLSCREEN = True scr_options = 0 if SCREEN_FULLSCREEN: scr_options += FULLSCREEN screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT),scr_options ,32) pygame.display.set_icon(util.load_image("kuvake")) pygame.display.set_caption("Trip on the Funny Boat") init() joy = None if pygame.joystick.get_count() > 0: joy = pygame.joystick.Joystick(0) joy.init() try: util.load_music("JDruid-Trip_on_the_Funny_Boat") if Variables.music: pygame.mixer.music.play(-1) except: # It's not a critical problem if there's no music pass pygame.time.set_timer(NEXTFRAME, 1000 / FPS) # 30 fps Water.global_water = Water() main_selection = 0 while True: main_selection = Menu(screen, ("New Game", "High Scores", "Options", "Quit"), main_selection).run() if main_selection == 0: # New Game selection = Menu(screen, ("Story Mode", "Endless Mode")).run() if selection == 0: # Story score = Game(screen).run() Highscores(screen, score).run() elif selection == 1: # Endless score = Game(screen, True).run() Highscores(screen, score, True).run() elif main_selection == 1: # High Scores selection = 0 while True: selection = Menu(screen, ("Story Mode", "Endless Mode", "Endless Online"), selection).run() if selection == 0: # Story Highscores(screen).run() elif selection == 1: # Endless Highscores(screen, endless = True).run() elif selection == 2: # Online Highscores(screen, endless = True, online = True).run() else: break elif main_selection == 2: # Options selection = Options(screen).run() else: #if main_selection == 3: # Quit return if __name__ == '__main__': main()
""" Turn pcap into csv file. Extract timestamp, source IP address, and query name of all DNS queries in the given pcap, and turn it into a CSV. """ import sys import scapy.all as scapy MEASUREMENT_HOSTS = frozenset(["92.243.1.186", "198.83.85.34"]) def process_file(pcap_file): packets = scapy.rdpcap(pcap_file) for packet in packets: if not packet.haslayer(scapy.IP): continue if not packet.haslayer(scapy.DNSQR): continue query = packet[scapy.DNSQR].qname src_addr = packet[scapy.IP].src # Skip DNS response. if src_addr in MEASUREMENT_HOSTS: continue print "%s,%s,%s" % (packet.time, packet[scapy.IP].src, query.lower()) return 0 if __name__ == "__main__": if len(sys.argv) != 2: print >> sys.stderr, "\nUsage: %s PCAP_FILE\n" % sys.argv[0] sys.exit(1) pcap_file = sys.argv[1] sys.exit(process_file(pcap_file))
"""State describing the conversion to momentum transfer""" from __future__ import (absolute_import, division, print_function) import json import copy from sans.state.state_base import (StateBase, rename_descriptor_names, BoolParameter, PositiveFloatParameter, ClassTypeParameter, StringParameter) from sans.common.enums import (ReductionDimensionality, RangeStepType, SANSFacility) from sans.state.state_functions import (is_pure_none_or_not_none, is_not_none_and_first_larger_than_second, validation_message) from sans.state.automatic_setters import (automatic_setters) @rename_descriptor_names class StateConvertToQ(StateBase): reduction_dimensionality = ClassTypeParameter(ReductionDimensionality) use_gravity = BoolParameter() gravity_extra_length = PositiveFloatParameter() radius_cutoff = PositiveFloatParameter() wavelength_cutoff = PositiveFloatParameter() # 1D settings q_min = PositiveFloatParameter() q_max = PositiveFloatParameter() q_1d_rebin_string = StringParameter() # 2D settings q_xy_max = PositiveFloatParameter() q_xy_step = PositiveFloatParameter() q_xy_step_type = ClassTypeParameter(RangeStepType) # ----------------------- # Q Resolution specific # --------------------- use_q_resolution = BoolParameter() q_resolution_collimation_length = PositiveFloatParameter() q_resolution_delta_r = PositiveFloatParameter() moderator_file = StringParameter() # Circular aperture settings q_resolution_a1 = PositiveFloatParameter() q_resolution_a2 = PositiveFloatParameter() # Rectangular aperture settings q_resolution_h1 = PositiveFloatParameter() q_resolution_h2 = PositiveFloatParameter() q_resolution_w1 = PositiveFloatParameter() q_resolution_w2 = PositiveFloatParameter() def __init__(self): super(StateConvertToQ, self).__init__() self.reduction_dimensionality = ReductionDimensionality.OneDim self.use_gravity = False self.gravity_extra_length = 0.0 self.use_q_resolution = False self.radius_cutoff = 0.0 self.wavelength_cutoff = 0.0 def validate(self): is_invalid = {} # 1D Q settings if not is_pure_none_or_not_none([self.q_min, self.q_max]): entry = validation_message("The q boundaries for the 1D reduction are inconsistent.", "Make sure that both q boundaries are set (or none).", {"q_min": self.q_min, "q_max": self.q_max}) is_invalid.update(entry) if is_not_none_and_first_larger_than_second([self.q_min, self.q_max]): entry = validation_message("Incorrect q bounds for 1D reduction.", "Make sure that the lower q bound is smaller than the upper q bound.", {"q_min": self.q_min, "q_max": self.q_max}) is_invalid.update(entry) if self.reduction_dimensionality is ReductionDimensionality.OneDim: if self.q_min is None or self.q_max is None: entry = validation_message("Q bounds not set for 1D reduction.", "Make sure to set the q boundaries when using a 1D reduction.", {"q_min": self.q_min, "q_max": self.q_max}) is_invalid.update(entry) if self.q_1d_rebin_string is not None: if self.q_1d_rebin_string == "": entry = validation_message("Q rebin string does not seem to be valid.", "Make sure to provide a valid rebin string", {"q_1d_rebin_string": self.q_1d_rebin_string}) is_invalid.update(entry) elif not is_valid_rebin_string(self.q_1d_rebin_string): entry = validation_message("Q rebin string does not seem to be valid.", "Make sure to provide a valid rebin string", {"q_1d_rebin_string": self.q_1d_rebin_string}) is_invalid.update(entry) # QXY settings if self.reduction_dimensionality is ReductionDimensionality.TwoDim: if self.q_xy_max is None or self.q_xy_step is None: entry = validation_message("Q bounds not set for 2D reduction.", "Make sure that the q_max value bound and the step for the 2D reduction.", {"q_xy_max": self.q_xy_max, "q_xy_step": self.q_xy_step}) is_invalid.update(entry) # Q Resolution settings if self.use_q_resolution: if not is_pure_none_or_not_none([self.q_resolution_a1, self.q_resolution_a2]): entry = validation_message("Inconsistent circular geometry.", "Make sure that both diameters for the circular apertures are set.", {"q_resolution_a1": self.q_resolution_a1, "q_resolution_a2": self.q_resolution_a2}) is_invalid.update(entry) if not is_pure_none_or_not_none([self.q_resolution_h1, self.q_resolution_h2, self.q_resolution_w1, self.q_resolution_w2]): entry = validation_message("Inconsistent rectangular geometry.", "Make sure that both diameters for the circular apertures are set.", {"q_resolution_h1": self.q_resolution_h1, "q_resolution_h2": self.q_resolution_h2, "q_resolution_w1": self.q_resolution_w1, "q_resolution_w2": self.q_resolution_w2}) is_invalid.update(entry) if all(element is None for element in [self.q_resolution_a1, self.q_resolution_a2, self.q_resolution_w1, self.q_resolution_w2, self.q_resolution_h1, self.q_resolution_h2]): entry = validation_message("Aperture is undefined.", "Make sure that you set the geometry for a circular or a " "rectangular aperture.", {"q_resolution_a1": self.q_resolution_a1, "q_resolution_a2": self.q_resolution_a2, "q_resolution_h1": self.q_resolution_h1, "q_resolution_h2": self.q_resolution_h2, "q_resolution_w1": self.q_resolution_w1, "q_resolution_w2": self.q_resolution_w2}) is_invalid.update(entry) if self.moderator_file is None: entry = validation_message("Missing moderator file.", "Make sure to specify a moderator file when using q resolution.", {"moderator_file": self.moderator_file}) is_invalid.update(entry) is_invalid.update({"moderator_file": "A moderator file is required for the q resolution calculation."}) if is_invalid: raise ValueError("StateMoveDetectorISIS: The provided inputs are illegal. " "Please see: {0}".format(json.dumps(is_invalid))) class StateConvertToQBuilder(object): @automatic_setters(StateConvertToQ) def __init__(self): super(StateConvertToQBuilder, self).__init__() self.state = StateConvertToQ() def build(self): self.state.validate() return copy.copy(self.state) def get_convert_to_q_builder(data_info): # The data state has most of the information that we require to define the q conversion. # For the factory method, only the facility/instrument is of relevance. facility = data_info.facility if facility is SANSFacility.ISIS: return StateConvertToQBuilder() else: raise NotImplementedError("StateConvertToQBuilder: Could not find any valid save builder for the " "specified StateData object {0}".format(str(data_info))) def is_valid_rebin_string(rebin_string): is_valid = True try: values = [float(el) for el in rebin_string.split(",")] if len(values) < 2: is_valid = False elif len(values) == 2: if values[0] > values[1]: is_valid = False elif len(values) % 2 == 1: # odd number of entries step_points = values[::2] if not is_increasing(step_points): is_valid = False else: is_valid = False except: # noqa is_valid = False return is_valid def is_increasing(step_points): return all(el1 <= el2 for el1, el2 in zip(step_points, step_points[1:]))
""" Viewer for archives packaged by archive.py """ from __future__ import print_function import argparse import os import pprint import sys import tempfile import zlib from PyInstaller.loader import pyimod02_archive from PyInstaller.archive.readers import CArchiveReader, NotAnArchiveError from PyInstaller.compat import stdin_input import PyInstaller.log stack = [] cleanup = [] def main(name, brief, debug, rec_debug, **unused_options): global stack if not os.path.isfile(name): print(name, "is an invalid file name!", file=sys.stderr) return 1 arch = get_archive(name) stack.append((name, arch)) if debug or brief: show_log(arch, rec_debug, brief) raise SystemExit(0) else: show(name, arch) while 1: try: toks = stdin_input('? ').split(None, 1) except EOFError: # Ctrl-D print(file=sys.stderr) # Clear line. break if not toks: usage() continue if len(toks) == 1: cmd = toks[0] arg = '' else: cmd, arg = toks cmd = cmd.upper() if cmd == 'U': if len(stack) > 1: arch = stack[-1][1] arch.lib.close() del stack[-1] name, arch = stack[-1] show(name, arch) elif cmd == 'O': if not arg: arg = stdin_input('open name? ') arg = arg.strip() try: arch = get_archive(arg) except NotAnArchiveError as e: print(e, file=sys.stderr) continue if arch is None: print(arg, "not found", file=sys.stderr) continue stack.append((arg, arch)) show(arg, arch) elif cmd == 'X': if not arg: arg = stdin_input('extract name? ') arg = arg.strip() data = get_data(arg, arch) if data is None: print("Not found", file=sys.stderr) continue filename = stdin_input('to filename? ') if not filename: print(repr(data)) else: with open(filename, 'wb') as fp: fp.write(data) elif cmd == 'Q': break else: usage() do_cleanup() def do_cleanup(): global stack, cleanup for (name, arch) in stack: arch.lib.close() stack = [] for filename in cleanup: try: os.remove(filename) except Exception as e: print("couldn't delete", filename, e.args, file=sys.stderr) cleanup = [] def usage(): print("U: go Up one level", file=sys.stderr) print("O <name>: open embedded archive name", file=sys.stderr) print("X <name>: extract name", file=sys.stderr) print("Q: quit", file=sys.stderr) def get_archive(name): if not stack: if name[-4:].lower() == '.pyz': return ZlibArchive(name) return CArchiveReader(name) parent = stack[-1][1] try: return parent.openEmbedded(name) except KeyError: return None except (ValueError, RuntimeError): ndx = parent.toc.find(name) dpos, dlen, ulen, flag, typcd, name = parent.toc[ndx] x, data = parent.extract(ndx) tempfilename = tempfile.mktemp() cleanup.append(tempfilename) with open(tempfilename, 'wb') as fp: fp.write(data) if typcd == 'z': return ZlibArchive(tempfilename) else: return CArchiveReader(tempfilename) def get_data(name, arch): if isinstance(arch.toc, dict): (ispkg, pos, length) = arch.toc.get(name, (0, None, 0)) if pos is None: return None with arch.lib: arch.lib.seek(arch.start + pos) return zlib.decompress(arch.lib.read(length)) ndx = arch.toc.find(name) dpos, dlen, ulen, flag, typcd, name = arch.toc[ndx] x, data = arch.extract(ndx) return data def show(name, arch): if isinstance(arch.toc, dict): print(" Name: (ispkg, pos, len)") toc = arch.toc else: print(" pos, length, uncompressed, iscompressed, type, name") toc = arch.toc.data pprint.pprint(toc) def get_content(arch, recursive, brief, output): if isinstance(arch.toc, dict): toc = arch.toc if brief: for name, _ in toc.items(): output.append(name) else: output.append(toc) else: toc = arch.toc.data for el in toc: if brief: output.append(el[5]) else: output.append(el) if recursive: if el[4] in ('z', 'a'): get_content(get_archive(el[5]), recursive, brief, output) stack.pop() def show_log(arch, recursive, brief): output = [] get_content(arch, recursive, brief, output) # first print all TOCs for out in output: if isinstance(out, dict): pprint.pprint(out) # then print the other entries pprint.pprint([out for out in output if not isinstance(out, dict)]) def get_archive_content(filename): """ Get a list of the (recursive) content of archive `filename`. This function is primary meant to be used by runtests. """ archive = get_archive(filename) stack.append((filename, archive)) output = [] get_content(archive, recursive=True, brief=True, output=output) do_cleanup() return output class ZlibArchive(pyimod02_archive.ZlibArchiveReader): def checkmagic(self): """ Overridable. Check to see if the file object self.lib actually has a file we understand. """ self.lib.seek(self.start) # default - magic is at start of file. if self.lib.read(len(self.MAGIC)) != self.MAGIC: raise RuntimeError("%s is not a valid %s archive file" % (self.path, self.__class__.__name__)) if self.lib.read(len(self.pymagic)) != self.pymagic: print("Warning: pyz is from a different Python version", file=sys.stderr) self.lib.read(4) def run(): parser = argparse.ArgumentParser() parser.add_argument('-l', '--log', default=False, action='store_true', dest='debug', help='Print an archive log (default: %(default)s)') parser.add_argument('-r', '--recursive', default=False, action='store_true', dest='rec_debug', help='Recursively print an archive log (default: %(default)s). ' 'Can be combined with -r') parser.add_argument('-b', '--brief', default=False, action='store_true', dest='brief', help='Print only file name. (default: %(default)s). ' 'Can be combined with -r') PyInstaller.log.__add_options(parser) parser.add_argument('name', metavar='pyi_archive', help="pyinstaller archive to show content of") args = parser.parse_args() PyInstaller.log.__process_options(parser, args) try: raise SystemExit(main(**vars(args))) except KeyboardInterrupt: raise SystemExit("Aborted by user request.") if __name__ == '__main__': run()
from .execute import GraphNode from . import preprocess def compile(layout_dict): preprocess.proprocess(layout_dict) # get nodes without any outputs root_nodes = layout_dict["nodes"].keys() - {l[0] for l in layout_dict["links"]} graph_dict = {} out = [GraphNode.from_layout(root_node, layout_dict, graph_dict) for root_node in root_nodes] return out
from .google import GoogleSpeaker from .watson import WatsonSpeaker """ alfred ~~~~~~~~~~~~~~~~ Google tts. """ __all__ = [ 'GoogleSpeaker', 'WatsonSpeaker' ]
class Solution(object): def findPaths(self, m, n, N, i, j): """ :type m: int :type n: int :type N: int :type i: int :type j: int :rtype: int """ MOD = 1000000007 paths = 0 cur = {(i, j): 1} for i in xrange(N): next = collections.defaultdict(int) for (x, y), cnt in cur.iteritems(): for dx, dy in [[-1, 0], [0, 1], [1, 0], [0, -1]]: nx = x + dx ny = y + dy if nx < 0 or ny < 0 or nx >= m or ny >= n: paths += cnt paths %= MOD else: next[(nx, ny)] += cnt next[(nx, ny)] %= MOD cur = next return paths
import os import unittest from urlparse import urlparse from paegan.utils.asarandom import AsaRandom class AsaRandomTest(unittest.TestCase): def test_create_random_filename(self): temp_filename = AsaRandom.filename(prefix="superduper", suffix=".nc") path = urlparse(temp_filename).path name, ext = os.path.splitext(path) assert name.index("superduper") == 0 assert ext == ".nc"
""" Contains exception classes specific to this project. """
class Zone: def __init__(self, id_zone, name, region, description): self.id = id_zone self.name = name self.region = region self.description = description
"""Description: SpeedMeter Tries To Reproduce The Behavior Of Some Car Controls (But Not Only), By Creating An "Angular" Control (Actually, Circular). I Remember To Have Seen It Somewhere, And I Decided To Implement It In wxPython. SpeedMeter Starts Its Construction From An Empty Bitmap, And It Uses Some Functions Of The wx.DC Class To Create The Rounded Effects. Everything Is Processed In The Draw() Method Of SpeedMeter Class. This Implementation Allows You To Use Either Directly The wx.PaintDC, Or The Better (For Me) Double Buffered Style With wx.BufferedPaintDC. The Double Buffered Implementation Has Been Adapted From The wxPython Wiki Example: http://wiki.wxpython.org/index.cgi/DoubleBufferedDrawing Usage: SpeedWindow1 = SM.SpeedMeter(parent, bufferedstyle, extrastyle, mousestyle ) None Of The Options (A Part Of Parent Class) Are Strictly Required, If You Use The Defaults You Get A Very Simple SpeedMeter. For The Full Listing Of The Input Parameters, See The SpeedMeter __init__() Method. Methods And Settings: SpeedMeter Is Highly Customizable, And In Particular You Can Set: - The Start And End Angle Of Existence For SpeedMeter; - The Intervals In Which You Divide The SpeedMeter (Numerical Values); - The Corresponding Thicks For The Intervals; - The Interval Colours (Different Intervals May Have Different Filling Colours); - The Ticks Font And Colour; - The Background Colour (Outsize The SpeedMeter Region); - The External Arc Colour; - The Hand (Arrow) Colour; - The Hand's Shadow Colour; - The Hand's Style ("Arrow" Or "Hand"); - The Partial Filler Colour; - The Number Of Secondary (Intermediate) Ticks; - The Direction Of Increasing Speed ("Advance" Or "Reverse"); - The Text To Be Drawn In The Middle And Its Font; - The Icon To Be Drawn In The Middle; - The First And Second Gradient Colours (That Fills The SpeedMeter Control); - The Current Value. For More Info On Methods And Initial Styles, Please Refer To The __init__() Method For SpeedMeter Or To The Specific Functions. SpeedMeter Control Is Freeware And Distributed Under The wxPython License. Latest Revision: Andrea Gavana @ 10 Oct 2005, 22.40 CET """ import wx import wx.lib.colourdb import wx.lib.fancytext as fancytext import wx.gizmos as gizmos # for LEDControl import exceptions from math import pi, sin, cos, log, sqrt, atan2 SM_NORMAL_DC = 0 SM_BUFFERED_DC = 1 SM_ROTATE_TEXT = 1 SM_DRAW_SECTORS = 2 SM_DRAW_PARTIAL_SECTORS = 4 SM_DRAW_HAND = 8 SM_DRAW_SHADOW = 16 SM_DRAW_PARTIAL_FILLER = 32 SM_DRAW_SECONDARY_TICKS = 64 SM_DRAW_MIDDLE_TEXT = 128 SM_DRAW_MIDDLE_ICON = 256 SM_DRAW_GRADIENT = 512 SM_DRAW_FANCY_TICKS = 1024 SM_DRAW_BOTTOM_TEXT = 2048 SM_DRAW_BOTTOM_LED = 4096 SM_MOUSE_TRACK = 1 LINE1 = 1 LINE2 = 2 LINE3 = 4 LINE4 = 8 LINE5 = 16 LINE6 = 32 LINE7 = 64 DECIMALSIGN = 128 DIGIT0 = LINE1 | LINE2 | LINE3 | LINE4 | LINE5 | LINE6 DIGIT1 = LINE2 | LINE3 DIGIT2 = LINE1 | LINE2 | LINE4 | LINE5 | LINE7 DIGIT3 = LINE1 | LINE2 | LINE3 | LINE4 | LINE7 DIGIT4 = LINE2 | LINE3 | LINE6 | LINE7 DIGIT5 = LINE1 | LINE3 | LINE4 | LINE6 | LINE7 DIGIT6 = LINE1 | LINE3 | LINE4 | LINE5 | LINE6 | LINE7 DIGIT7 = LINE1 | LINE2 | LINE3 DIGIT8 = LINE1 | LINE2 | LINE3 | LINE4 | LINE5 | LINE6 | LINE7 DIGIT9 = LINE1 | LINE2 | LINE3 | LINE6 | LINE7 DASH = LINE7 DIGITALL = -1 fontfamily = range(70, 78) familyname = ["default", "decorative", "roman", "script", "swiss", "modern", "teletype"] weights = range(90, 93) weightsname = ["normal", "light", "bold"] styles = [90, 93, 94] stylesname = ["normal", "italic", "slant"] class BufferedWindow(wx.Window): """ A Buffered window class. To use it, subclass it and define a Draw(DC) method that takes a DC to draw to. In that method, put the code needed to draw the picture you want. The window will automatically be double buffered, and the screen will be automatically updated when a Paint event is received. When the drawing needs to change, you app needs to call the UpdateDrawing() method. Since the drawing is stored in a bitmap, you can also save the drawing to file by calling the SaveToFile(self,file_name,file_type) method. """ def __init__(self, parent, id, pos = wx.DefaultPosition, size = wx.DefaultSize, style=wx.NO_FULL_REPAINT_ON_RESIZE, bufferedstyle=SM_BUFFERED_DC): wx.Window.__init__(self, parent, id, pos, size, style) self.Bind(wx.EVT_PAINT, self.OnPaint) self.Bind(wx.EVT_SIZE, self.OnSize) self.Bind(wx.EVT_ERASE_BACKGROUND, lambda x: None) # OnSize called to make sure the buffer is initialized. # This might result in OnSize getting called twice on some # platforms at initialization, but little harm done. self.OnSize(None) def Draw(self, dc): """ just here as a place holder. This method should be over-ridden when sub-classed """ pass def OnPaint(self, event): """ All that is needed here is to draw the buffer to screen """ if self._bufferedstyle == SM_BUFFERED_DC: dc = wx.BufferedPaintDC(self, self._Buffer) else: dc = wx.PaintDC(self) dc.DrawBitmap(self._Buffer,0,0) def OnSize(self,event): # The Buffer init is done here, to make sure the buffer is always # the same size as the Window self.Width, self.Height = self.GetClientSizeTuple() # Make new off screen bitmap: this bitmap will always have the # current drawing in it, so it can be used to save the image to # a file, or whatever. # This seems required on MacOS, it doesn't like wx.EmptyBitmap with # size = (0, 0) # Thanks to Gerard Grazzini if "__WXMAC__" in wx.Platform: if self.Width == 0: self.Width = 1 if self.Height == 0: self.Height = 1 self._Buffer = wx.EmptyBitmap(self.Width, self.Height) self.UpdateDrawing() def UpdateDrawing(self): """ This would get called if the drawing needed to change, for whatever reason. The idea here is that the drawing is based on some data generated elsewhere in the system. IF that data changes, the drawing needs to be updated. """ if self._bufferedstyle == SM_BUFFERED_DC: dc = wx.BufferedDC(wx.ClientDC(self), self._Buffer) self.Draw(dc) else: # update the buffer dc = wx.MemoryDC() dc.SelectObject(self._Buffer) self.Draw(dc) # update the screen wx.ClientDC(self).Blit(0, 0, self.Width, self.Height, dc, 0, 0) class SpeedMeter(BufferedWindow): """ Class for a gauge-style display using an arc marked with tick marks and interval numbers, and a moving needle/hand/pointer. MODIFIED to add native Python wx.gizmos.LEDNubmerCtrl-type display, and a number of other things by Jason Antman <http://www.jasonantman.com> <jason@jasonantman.com> @todo: Need to document everything (all methods). @todo: Build example code. @todo: Find everything used internally only and prefix methods with "__" @todo: Find all "raise" statements, and any "print" statements that print an error, make them work with exceptions - IndexError, TypeError, RuntimeError, LookupError @todo: change all mentions of "hand" to "needle" @todo: make sure we have setters/getters for DrawFaded, Alignment, Value (for LED) @todo: in client, test gradients """ bottomTextBottom = None DEBUG = False # controls debugging print statements def __init__(self, parent, id=wx.ID_ANY, pos=wx.DefaultPosition, size=wx.DefaultSize, extrastyle=SM_DRAW_HAND, bufferedstyle=SM_BUFFERED_DC, mousestyle=0): """ Default Class Constructor. Non Standard wxPython Parameters Are: a) extrastyle: This Value Specifies The SpeedMeter Styles: - SM_ROTATE_TEXT: Draws The Ticks Rotated: The Ticks Are Rotated Accordingly To The Tick Marks Positions; - SM_DRAW_SECTORS: Different Intervals Are Painted In Differend Colours (Every Sector Of The Circle Has Its Own Colour); - SM_DRAW_PARTIAL_SECTORS: Every Interval Has Its Own Colour, But Only A Circle Corona Is Painted Near The Ticks; - SM_DRAW_HAND: The Hand (Arrow Indicator) Is Drawn; - SM_DRAW_SHADOW: A Shadow For The Hand Is Drawn; - SM_DRAW_PARTIAL_FILLER: A Circle Corona That Follows The Hand Position Is Drawn Near The Ticks; - SM_DRAW_SECONDARY_TICKS: Intermediate (Smaller) Ticks Are Drawn Between Principal Ticks; - SM_DRAW_MIDDLE_TEXT: Some Text Is Printed In The Middle Of The Control Near The Center; - SM_DRAW_MIDDLE_ICON: An Icon Is Drawn In The Middle Of The Control Near The Center; - SM_DRAW_GRADIENT: A Gradient Of Colours Will Fill The Control; - SM_DRAW_FANCY_TICKS: With This Style You Can Use XML Tags To Create Some Custom Text And Draw It At The Ticks Position. See wx.lib.fancytext For The Tags.; - SM_DRAW_BOTTOM_TEXT: Some Text Is Printed In The Bottom Of The Control - SM_DRAW_BOTTOM_LED: A wx.gizmos.LEDNumberCtrl-style value display is printed at the bottom b) bufferedstyle: This Value Allows You To Use The Normal wx.PaintDC Or The Double Buffered Drawing Options: - SM_NORMAL_DC Uses The Normal wx.PaintDC; - SM_BUFFERED_DC Uses The Double Buffered Drawing Style. c) mousestyle: This Value Allows You To Use The Mouse To Change The SpeedMeter Value Interactively With Left Click/Drag Events: - SM_MOUSE_TRACK: The Mouse Left Click/Drag Allow You To Change The SpeedMeter Value Interactively. """ self._extrastyle = extrastyle self._bufferedstyle = bufferedstyle self._mousestyle = mousestyle if self._extrastyle & SM_DRAW_SECTORS and self._extrastyle & SM_DRAW_GRADIENT: errstr = "\nERROR: Incompatible Options: SM_DRAW_SECTORS Can Not Be Used In " errstr = errstr + "Conjunction With SM_DRAW_GRADIENT." raise errstr if self._extrastyle & SM_DRAW_PARTIAL_SECTORS and self._extrastyle & SM_DRAW_SECTORS: errstr = "\nERROR: Incompatible Options: SM_DRAW_SECTORS Can Not Be Used In " errstr = errstr + "Conjunction With SM_DRAW_PARTIAL_SECTORS." raise errstr if self._extrastyle & SM_DRAW_PARTIAL_SECTORS and self._extrastyle & SM_DRAW_PARTIAL_FILLER: errstr = "\nERROR: Incompatible Options: SM_DRAW_PARTIAL_SECTORS Can Not Be Used In " errstr = errstr + "Conjunction With SM_DRAW_PARTIAL_FILLER." raise errstr if self._extrastyle & SM_DRAW_FANCY_TICKS and self._extrastyle & SM_ROTATE_TEXT: errstr = "\nERROR: Incompatible Options: SM_DRAW_FANCY_TICKS Can Not Be Used In " errstr = errstr + "Conjunction With SM_ROTATE_TEXT." raise errstr if self._extrastyle & SM_DRAW_SHADOW and self._extrastyle & SM_DRAW_HAND == 0: errstr = "\nERROR: Incompatible Options: SM_DRAW_SHADOW Can Be Used Only In " errstr = errstr + "Conjunction With SM_DRAW_HAND." if self._extrastyle & SM_DRAW_FANCY_TICKS: wx.lib.colourdb.updateColourDB() self.SetValueMultiplier() # for LED control self.SetAngleRange() self.SetIntervals() self.SetSpeedValue() self.SetIntervalColours() self.SetArcColour() self.SetTicks() self.SetTicksFont() self.SetTicksColour() self.SetSpeedBackground() self.SetHandColour() self.SetShadowColour() self.SetFillerColour() self.SetDirection() self.SetNumberOfSecondaryTicks() self.SetMiddleText() self.SetMiddleTextFont() self.SetMiddleTextColour() self.SetBottomText() self.SetBottomTextFont() self.SetBottomTextColour() self.SetFirstGradientColour() self.SetSecondGradientColour() self.SetHandStyle() self.DrawExternalArc() self.DrawExternalCircle() # for LED control self._LEDwidth = 0 self._LEDheight = 0 self._LEDx = 0 self._LEDy = 0 self._InitLEDInternals() self.SetLEDAlignment() self.SetDrawFaded() BufferedWindow.__init__(self, parent, id, pos, size, style=wx.NO_FULL_REPAINT_ON_RESIZE, bufferedstyle=bufferedstyle) if self._mousestyle & SM_MOUSE_TRACK: self.Bind(wx.EVT_MOUSE_EVENTS, self.OnMouseMotion) def Draw(self, dc): """ Draws Everything On The Empty Bitmap. Here All The Chosen Styles Are Applied. GIGANTIC HUMONGOUS UGLY function that draws I{everything} on the bitmap except for the LEDs. @param dc: the dc @type dc: L{wx.BufferedDC} """ size = self.GetClientSize() if size.x < 21 or size.y < 21: return new_dim = size.Get() if not hasattr(self, "dim"): self.dim = new_dim self.scale = min([float(new_dim[0]) / self.dim[0], float(new_dim[1]) / self.dim[1]]) # Create An Empty Bitmap self.faceBitmap = wx.EmptyBitmap(size.width, size.height) dc.BeginDrawing() speedbackground = self.GetSpeedBackground() # Set Background Of The Control dc.SetBackground(wx.Brush(speedbackground)) dc.Clear() centerX = self.faceBitmap.GetWidth()/2 centerY = self.faceBitmap.GetHeight()/2 self.CenterX = centerX self.CenterY = centerY # Get The Radius Of The Sector. Set It A Bit Smaller To Correct Draw After radius = min(centerX, centerY) - 2 self.Radius = radius # Get The Angle Of Existance Of The Sector anglerange = self.GetAngleRange() startangle = anglerange[1] endangle = anglerange[0] self.StartAngle = startangle self.EndAngle = endangle # Initialize The Colours And The Intervals - Just For Reference To The # Children Functions colours = None intervals = None if self._extrastyle & SM_DRAW_SECTORS or self._extrastyle & SM_DRAW_PARTIAL_SECTORS: # Get The Intervals Colours colours = self.GetIntervalColours()[:] textangles = [] colourangles = [] xcoords = [] ycoords = [] # Get The Intervals (Partial Sectors) intervals = self.GetIntervals()[:] start = min(intervals) end = max(intervals) span = end - start self.StartValue = start self.EndValue = end self.Span = span # Get The Current Value For The SpeedMeter currentvalue = self.GetSpeedValue() # Get The Direction Of The SpeedMeter direction = self.GetDirection() if direction == "Reverse": intervals.reverse() if self._extrastyle & SM_DRAW_SECTORS or self._extrastyle & SM_DRAW_PARTIAL_SECTORS: colours.reverse() currentvalue = end - currentvalue # This Because DrawArc Does Not Draw Last Point offset = 0.1*self.scale/180.0 xstart, ystart = self.__CircleCoords(radius+1, -endangle, centerX, centerY) xend, yend = self.__CircleCoords(radius+1, -startangle-offset, centerX, centerY) # Calculate The Angle For The Current Value Of SpeedMeter accelangle = (currentvalue - start)/float(span)*(startangle-endangle) - startangle dc.SetPen(wx.TRANSPARENT_PEN) if self._extrastyle & SM_DRAW_PARTIAL_FILLER: # Get Some Data For The Partial Filler fillercolour = self.GetFillerColour() fillerendradius = radius - 10.0*self.scale fillerstartradius = radius if direction == "Advance": fillerstart = accelangle fillerend = -startangle else: fillerstart = -endangle fillerend = accelangle xs1, ys1 = self.__CircleCoords(fillerendradius, fillerstart, centerX, centerY) xe1, ye1 = self.__CircleCoords(fillerendradius, fillerend, centerX, centerY) xs2, ys2 = self.__CircleCoords(fillerstartradius, fillerstart, centerX, centerY) xe2, ye2 = self.__CircleCoords(fillerstartradius, fillerend, centerX, centerY) # Get The Sector In Which The Current Value Is intersection = self.__GetIntersection(currentvalue, intervals) sectorradius = radius - 10*self.scale else: sectorradius = radius if self._extrastyle & SM_DRAW_PARTIAL_FILLER: # Draw The Filler (Both In "Advance" And "Reverse" Directions) dc.SetBrush(wx.Brush(fillercolour)) dc.DrawArc(xs2, ys2, xe2, ye2, centerX, centerY) if self._extrastyle & SM_DRAW_SECTORS == 0: dc.SetBrush(wx.Brush(speedbackground)) xclean1, yclean1 = self.__CircleCoords(sectorradius, -endangle, centerX, centerY) xclean2, yclean2 = self.__CircleCoords(sectorradius, -startangle-offset, centerX, centerY) dc.DrawArc(xclean1, yclean1, xclean2, yclean2, centerX, centerY) # This Is Needed To Fill The Partial Sector Correctly xold, yold = self.__CircleCoords(radius, startangle+endangle, centerX, centerY) # Draw The Sectors for ii, interval in enumerate(intervals): if direction == "Advance": current = interval - start else: current = end - interval angle = (current/float(span))*(startangle-endangle) - startangle angletext = -((pi/2.0) + angle)*180/pi textangles.append(angletext) colourangles.append(angle) xtick, ytick = self.__CircleCoords(radius, angle, centerX, centerY) # Keep The Coordinates, We Will Need Them After To Position The Ticks xcoords.append(xtick) ycoords.append(ytick) x = xtick y = ytick if self._extrastyle & SM_DRAW_SECTORS: if self._extrastyle & SM_DRAW_PARTIAL_FILLER: if direction == "Advance": if current > currentvalue: x, y = self.__CircleCoords(radius, angle, centerX, centerY) else: x, y = self.__CircleCoords(sectorradius, angle, centerX, centerY) else: if current < end - currentvalue: x, y = self.__CircleCoords(radius, angle, centerX, centerY) else: x, y = self.__CircleCoords(sectorradius, angle, centerX, centerY) else: x, y = self.__CircleCoords(radius, angle, centerX, centerY) if ii > 0: if self._extrastyle & SM_DRAW_PARTIAL_FILLER and ii == intersection: # We Got The Interval In Which There Is The Current Value. If We Choose # A "Reverse" Direction, First We Draw The Partial Sector, Next The Filler dc.SetBrush(wx.Brush(speedbackground)) if direction == "Reverse": if self._extrastyle & SM_DRAW_SECTORS: dc.SetBrush(wx.Brush(colours[ii-1])) dc.DrawArc(xe2, ye2, xold, yold, centerX, centerY) if self._extrastyle & SM_DRAW_SECTORS: dc.SetBrush(wx.Brush(colours[ii-1])) else: dc.SetBrush(wx.Brush(speedbackground)) dc.DrawArc(xs1, ys1, xe1, ye1, centerX, centerY) if self._extrastyle & SM_DRAW_SECTORS: dc.SetBrush(wx.Brush(colours[ii-1])) # Here We Draw The Rest Of The Sector In Which The Current Value Is if direction == "Advance": dc.DrawArc(xs1, ys1, x, y, centerX, centerY) x = xs1 y = ys1 else: dc.DrawArc(xe2, ye2, x, y, centerX, centerY) elif self._extrastyle & SM_DRAW_SECTORS: dc.SetBrush(wx.Brush(colours[ii-1])) # Here We Still Use The SM_DRAW_PARTIAL_FILLER Style, But We Are Not # In The Sector Where The Current Value Resides if self._extrastyle & SM_DRAW_PARTIAL_FILLER and ii != intersection: if direction == "Advance": dc.DrawArc(x, y, xold, yold, centerX, centerY) else: if ii < intersection: dc.DrawArc(x, y, xold, yold, centerX, centerY) # This Is The Case Where No SM_DRAW_PARTIAL_FILLER Has Been Chosen else: dc.DrawArc(x, y, xold, yold, centerX, centerY) else: if self._extrastyle & SM_DRAW_PARTIAL_FILLER and self._extrastyle & SM_DRAW_SECTORS: dc.SetBrush(wx.Brush(fillercolour)) dc.DrawArc(xs2, ys2, xe2, ye2, centerX, centerY) x, y = self.__CircleCoords(sectorradius, angle, centerX, centerY) dc.SetBrush(wx.Brush(colours[ii])) dc.DrawArc(xs1, ys1, xe1, ye1, centerX, centerY) x = xs2 y = ys2 xold = x yold = y if self._extrastyle & SM_DRAW_PARTIAL_SECTORS: sectorendradius = radius - 10.0*self.scale sectorstartradius = radius xps, yps = self.__CircleCoords(sectorstartradius, angle, centerX, centerY) if ii > 0: dc.SetBrush(wx.Brush(colours[ii-1])) dc.DrawArc(xps, yps, xpsold, ypsold, centerX, centerY) xpsold = xps ypsold = yps if self._extrastyle & SM_DRAW_PARTIAL_SECTORS: xps1, yps1 = self.__CircleCoords(sectorendradius, -endangle+2*offset, centerX, centerY) xps2, yps2 = self.__CircleCoords(sectorendradius, -startangle-2*offset, centerX, centerY) dc.SetBrush(wx.Brush(speedbackground)) dc.DrawArc(xps1, yps1, xps2, yps2, centerX, centerY) if self._extrastyle & SM_DRAW_GRADIENT: dc.SetPen(wx.TRANSPARENT_PEN) xcurrent, ycurrent = self.__CircleCoords(radius, accelangle, centerX, centerY) # calculate gradient coefficients col2 = self.GetSecondGradientColour() col1 = self.GetFirstGradientColour() r1, g1, b1 = int(col1.Red()), int(col1.Green()), int(col1.Blue()) r2, g2, b2 = int(col2.Red()), int(col2.Green()), int(col2.Blue()) flrect = float(radius+self.scale) numsteps = 200 rstep = float((r2 - r1)) / numsteps gstep = float((g2 - g1)) / numsteps bstep = float((b2 - b1)) / numsteps rf, gf, bf = 0, 0, 0 radiusteps = flrect/numsteps interface = 0 for ind in range(numsteps+1): currCol = (r1 + rf, g1 + gf, b1 + bf) dc.SetBrush(wx.Brush(currCol)) gradradius = flrect - radiusteps*ind xst1, yst1 = self.__CircleCoords(gradradius, -endangle, centerX, centerY) xen1, yen1 = self.__CircleCoords(gradradius, -startangle-offset, centerX, centerY) if self._extrastyle & SM_DRAW_PARTIAL_FILLER: if gradradius >= fillerendradius: if direction == "Advance": dc.DrawArc(xstart, ystart, xcurrent, ycurrent, centerX, centerY) else: dc.DrawArc(xcurrent, ycurrent, xend, yend, centerX, centerY) else: if interface == 0: interface = 1 myradius = fillerendradius + 1 xint1, yint1 = self.__CircleCoords(myradius, -endangle, centerX, centerY) xint2, yint2 = self.__CircleCoords(myradius, -startangle-offset, centerX, centerY) dc.DrawArc(xint1, yint1, xint2, yint2, centerX, centerY) dc.DrawArc(xst1, yst1, xen1, yen1, centerX, centerY) else: if self._extrastyle & SM_DRAW_PARTIAL_SECTORS: if gradradius <= sectorendradius: if interface == 0: interface = 1 myradius = sectorendradius + 1 xint1, yint1 = self.__CircleCoords(myradius, -endangle, centerX, centerY) xint2, yint2 = self.__CircleCoords(myradius, -startangle-offset, centerX, centerY) dc.DrawArc(xint1, yint1, xint2, yint2, centerX, centerY) else: dc.DrawArc(xst1, yst1, xen1, yen1, centerX, centerY) else: dc.DrawArc(xst1, yst1, xen1, yen1, centerX, centerY) rf = rf + rstep gf = gf + gstep bf = bf + bstep textheight = 0 # Get The Ticks And The Ticks Colour ticks = self.GetTicks()[:] tickscolour = self.GetTicksColour() if direction == "Reverse": ticks.reverse() if self._extrastyle & SM_DRAW_SECONDARY_TICKS: ticknum = self.GetNumberOfSecondaryTicks() oldinterval = intervals[0] dc.SetPen(wx.Pen(tickscolour, 1)) dc.SetBrush(wx.Brush(tickscolour)) dc.SetTextForeground(tickscolour) # Get The Font For The Ticks tfont, fontsize = self.GetTicksFont() tfont = tfont[0] myfamily = tfont.GetFamily() fsize = self.scale*fontsize tfont.SetPointSize(int(fsize)) tfont.SetFamily(myfamily) dc.SetFont(tfont) if self._extrastyle & SM_DRAW_FANCY_TICKS: facename = tfont.GetFaceName() ffamily = familyname[fontfamily.index(tfont.GetFamily())] fweight = weightsname[weights.index(tfont.GetWeight())] fstyle = stylesname[styles.index(tfont.GetStyle())] fcolour = wx.TheColourDatabase.FindName(tickscolour) textheight = 0 # Draw The Ticks And The Markers (Text Ticks) for ii, angles in enumerate(textangles): strings = ticks[ii] if self._extrastyle & SM_DRAW_FANCY_TICKS == 0: width, height, dummy, dummy = dc.GetFullTextExtent(strings, tfont) textheight = height else: width, height, dummy = fancytext.GetFullExtent(strings, dc) textheight = height lX = dc.GetCharWidth()/2.0 lY = dc.GetCharHeight()/2.0 if self._extrastyle & SM_ROTATE_TEXT: angis = colourangles[ii] - float(width)/(2.0*radius) x, y = self.__CircleCoords(radius-10.0*self.scale, angis, centerX, centerY) dc.DrawRotatedText(strings, x, y, angles) else: angis = colourangles[ii] if self._extrastyle & SM_DRAW_FANCY_TICKS == 0: x, y = self.__CircleCoords(radius-10*self.scale, angis, centerX, centerY) lX = lX*len(strings) x = x - lX - width*cos(angis)/2.0 y = y - lY - height*sin(angis)/2.0 if self._extrastyle & SM_DRAW_FANCY_TICKS: fancystr = '<font family="' + ffamily + '" size="' + str(int(fsize)) + '" weight="' + fweight + '"' fancystr = fancystr + ' color="' + fcolour + '"' + ' style="' + fstyle + '"> ' + strings + ' </font>' width, height, dummy = fancytext.GetFullExtent(fancystr, dc) x, y = self.__CircleCoords(radius-10*self.scale, angis, centerX, centerY) x = x - width/2.0 - width*cos(angis)/2.0 y = y - height/2.0 - height*sin(angis)/2.0 fancytext.RenderToDC(fancystr, dc, x, y) else: dc.DrawText(strings, x, y) # This Is The Small Rectangle --> Tick Mark rectangle = colourangles[ii] + pi/2.0 sinrect = sin(rectangle) cosrect = cos(rectangle) x1 = xcoords[ii] - self.scale*cosrect y1 = ycoords[ii] - self.scale*sinrect x2 = x1 + 3*self.scale*cosrect y2 = y1 + 3*self.scale*sinrect x3 = x1 - 10*self.scale*sinrect y3 = y1 + 10*self.scale*cosrect x4 = x3 + 3*self.scale*cosrect y4 = y3 + 3*self.scale*sinrect points = [(x1, y1), (x2, y2), (x4, y4), (x3, y3)] dc.DrawPolygon(points) if self._extrastyle & SM_DRAW_SECONDARY_TICKS: if ii > 0: newinterval = intervals[ii] oldinterval = intervals[ii-1] spacing = (newinterval - oldinterval)/float(ticknum+1) for tcount in xrange(ticknum): if direction == "Advance": oldinterval = (oldinterval + spacing) - start stint = oldinterval else: oldinterval = start + (oldinterval + spacing) stint = end - oldinterval angle = (stint/float(span))*(startangle-endangle) - startangle rectangle = angle + pi/2.0 sinrect = sin(rectangle) cosrect = cos(rectangle) xt, yt = self.__CircleCoords(radius, angle, centerX, centerY) x1 = xt - self.scale*cosrect y1 = yt - self.scale*sinrect x2 = x1 + self.scale*cosrect y2 = y1 + self.scale*sinrect x3 = x1 - 6*self.scale*sinrect y3 = y1 + 6*self.scale*cosrect x4 = x3 + self.scale*cosrect y4 = y3 + self.scale*sinrect points = [(x1, y1), (x2, y2), (x4, y4), (x3, y3)] dc.DrawPolygon(points) oldinterval = newinterval tfont.SetPointSize(fontsize) tfont.SetFamily(myfamily) self.SetTicksFont(tfont) # Draw The External Arc dc.SetBrush(wx.TRANSPARENT_BRUSH) if self._drawarc and not self._drawfullarc: dc.SetPen(wx.Pen(self.GetArcColour(), 2.0)) # If It's Not A Complete Circle, Draw The Connecting Lines And The Arc if abs(abs(startangle - endangle) - 2*pi) > 1.0/180.0: dc.DrawArc(xstart, ystart, xend, yend, centerX, centerY) dc.DrawLine(xstart, ystart, centerX, centerY) dc.DrawLine(xend, yend, centerX, centerY) else: # Draw A Circle, Is A 2*pi Extension Arc = Complete Circle dc.DrawCircle(centerX, centerY, radius) if self._drawfullarc: dc.DrawCircle(centerX, centerY, radius) # Here We Draw The Text In The Middle, Near The Start Of The Arrow (If Present) # This Is Like The "Km/h" Or "mph" Text In The Cars if self._extrastyle & SM_DRAW_MIDDLE_TEXT: middlecolour = self.GetMiddleTextColour() middletext = self.GetMiddleText() middleangle = (startangle + endangle)/2.0 middlefont, middlesize = self.GetMiddleTextFont() middlesize = self.scale*middlesize middlefont.SetPointSize(int(middlesize)) dc.SetFont(middlefont) mw, mh, dummy, dummy = dc.GetFullTextExtent(middletext, middlefont) newx = centerX + 1.5*mw*cos(middleangle) - mw/2.0 newy = centerY - 1.5*mh*sin(middleangle) - mh/2.0 dc.SetTextForeground(middlecolour) dc.DrawText(middletext, newx, newy) # Here We Draw The Text In The Bottom # This Is Like The "Km/h" Or "mph" Text In The Cars if self._extrastyle & SM_DRAW_BOTTOM_TEXT: bottomcolour = self.GetBottomTextColour() bottomtext = self.GetBottomText() # hack for two lines of text if bottomtext.find("\n") != -1: # we have a newline foo = bottomtext.partition("\n") bottomtext1 = foo[0] bottomtext2 = foo[2] bottomangle = (startangle + endangle)/2.0 bottomfont, bottomsize = self.GetBottomTextFont() bottomsize = self.scale*bottomsize bottomfont.SetPointSize(int(bottomsize)) dc.SetFont(bottomfont) mw, mh, dummy, dummy = dc.GetFullTextExtent(bottomtext1, bottomfont) newx = centerX + 1.5*mw*cos(bottomangle) - mw/2.0 newy = ystart yoffset = mh + (mh * 2) dc.SetTextForeground(bottomcolour) dc.DrawText(bottomtext1, newx, newy) mw, mh, dummy, dummy = dc.GetFullTextExtent(bottomtext2, bottomfont) newx = centerX + 1.5*mw*cos(bottomangle) - mw/2.0 newy = ystart + yoffset dc.SetTextForeground(bottomcolour) dc.DrawText(bottomtext2, newx, newy) else: bottomangle = (startangle + endangle)/2.0 bottomfont, bottomsize = self.GetBottomTextFont() bottomsize = self.scale*bottomsize bottomfont.SetPointSize(int(bottomsize)) dc.SetFont(bottomfont) mw, mh, dummy, dummy = dc.GetFullTextExtent(bottomtext, bottomfont) newx = centerX + 1.5*mw*cos(bottomangle) - mw/2.0 newy = ystart dc.SetTextForeground(bottomcolour) dc.DrawText(bottomtext, newx, newy) self.bottomTextBottom = (int)(newy + mh) # Here We Draw The Icon In The Middle, Near The Start Of The Arrow (If Present) # This Is Like The "Fuel" Icon In The Cars if self._extrastyle & SM_DRAW_MIDDLE_ICON: middleicon = self.GetMiddleIcon() middlewidth, middleheight = self.__GetMiddleIconDimens() middleicon.SetWidth(middlewidth*self.scale) middleicon.SetHeight(middleheight*self.scale) middleangle = (startangle + endangle)/2.0 mw = middleicon.GetWidth() mh = middleicon.GetHeight() newx = centerX + 1.5*mw*cos(middleangle) - mw/2.0 newy = centerY - 1.5*mh*sin(middleangle) - mh/2.0 dc.DrawIcon(middleicon, newx, newy) # Restore Icon Dimension, If Not Something Strange Happens middleicon.SetWidth(middlewidth) middleicon.SetHeight(middleheight) # Requested To Draw The Hand if self._extrastyle & SM_DRAW_HAND: handstyle = self.GetHandStyle() handcolour = self.GetHandColour() # Calculate The Data For The Hand if textheight == 0: maxradius = radius-10*self.scale else: maxradius = radius-5*self.scale-textheight xarr, yarr = self.__CircleCoords(maxradius, accelangle, centerX, centerY) if handstyle == "Arrow": x1, y1 = self.__CircleCoords(maxradius, accelangle - 4.0/180, centerX, centerY) x2, y2 = self.__CircleCoords(maxradius, accelangle + 4.0/180, centerX, centerY) x3, y3 = self.__CircleCoords(maxradius+3*(abs(xarr-x1)), accelangle, centerX, centerY) newx = centerX + 4*cos(accelangle)*self.scale newy = centerY + 4*sin(accelangle)*self.scale else: x1 = centerX + 4*self.scale*sin(accelangle) y1 = centerY - 4*self.scale*cos(accelangle) x2 = xarr y2 = yarr x3 = centerX - 4*self.scale*sin(accelangle) y3 = centerY + 4*self.scale*cos(accelangle) x4, y4 = self.__CircleCoords(5*self.scale*sqrt(3), accelangle+pi, centerX, centerY) if self._extrastyle & SM_DRAW_SHADOW: if handstyle == "Arrow": # Draw The Shadow shadowcolour = self.GetShadowColour() dc.SetPen(wx.Pen(shadowcolour, 5*log(self.scale+1))) dc.SetBrush(wx.Brush(shadowcolour)) shadowdistance = 2.0*self.scale dc.DrawLine(newx + shadowdistance, newy + shadowdistance, xarr + shadowdistance, yarr + shadowdistance) dc.DrawPolygon([(x1+shadowdistance, y1+shadowdistance), (x2+shadowdistance, y2+shadowdistance), (x3+shadowdistance, y3+shadowdistance)]) else: # Draw The Shadow shadowcolour = self.GetShadowColour() dc.SetBrush(wx.Brush(shadowcolour)) dc.SetPen(wx.Pen(shadowcolour, 1.0)) shadowdistance = 1.5*self.scale dc.DrawPolygon([(x1+shadowdistance, y1+shadowdistance), (x2+shadowdistance, y2+shadowdistance), (x3+shadowdistance, y3+shadowdistance), (x4+shadowdistance, y4+shadowdistance)]) if handstyle == "Arrow": dc.SetPen(wx.Pen(handcolour, 1.5)) # Draw The Small Circle In The Center --> The Hand "Holder" dc.SetBrush(wx.Brush(speedbackground)) dc.DrawCircle(centerX, centerY, 4*self.scale) dc.SetPen(wx.Pen(handcolour, 5*log(self.scale+1))) # Draw The "Hand", An Arrow dc.DrawLine(newx, newy, xarr, yarr) # Draw The Arrow Pointer dc.SetBrush(wx.Brush(handcolour)) dc.DrawPolygon([(x1, y1), (x2, y2), (x3, y3)]) else: # Draw The Hand Pointer dc.SetPen(wx.Pen(handcolour, 1.5)) dc.SetBrush(wx.Brush(handcolour)) dc.DrawPolygon([(x1, y1), (x2, y2), (x3, y3), (x4, y4)]) # Draw The Small Circle In The Center --> The Hand "Holder" dc.SetBrush(wx.Brush(speedbackground)) dc.DrawCircle(centerX, centerY, 4*self.scale) # here is where we draw the LEDNumberCtrl-style display at the bottom, if requested if self._extrastyle & SM_DRAW_BOTTOM_LED: self._DrawLED(dc, centerX) dc.EndDrawing() def SetIntervals(self, intervals=None): """ Sets The Intervals For SpeedMeter (Main Ticks Numeric Values). @param intervals: list of the interval end points @type intervals: L{list} of L{int}s or L{float}s, one marking the end of each interval """ if intervals is None: intervals = [0, 50, 100] self._intervals = intervals def GetIntervals(self): """ Gets The Intervals For SpeedMeter. @rtype: L{list} of L{int}s or L{float}s, one marking the end of each interval """ return self._intervals def GetBottomTextBottom(self): """ Gets the Y position of the bottom of the BottomText. Used to position the LEDNumberCtrl if one is present. @return: Y position of the bottom of the BottomText on the BufferedWindow (DC) @rtype: int """ return self.bottomTextBottom def GetWidth(self): """ Gets the whole width of the SpeedMeter. Used to position the LEDNumberCtrl if present. @return: Width (px) of the whole faceBitmap @rtype: int """ return self.faceBitmap.GetWidth() def SetSpeedValue(self, value=None): """ Sets The Current Value For SpeedMeter. Please also see L{SetValueMultiplier}() function. The value MUST be within the range specified by the L{intervals} (see L{GetIntervals}). Calling this function will trigger the L{UpdateDrawing}() method to redraw. @param value: the desired value @type value: L{int} or L{float} """ if value is None: value = (max(self._intervals) - min(self._intervals))/2.0 else: if not (isinstance(value, int) or isinstance(value, float)): raise TypeError("value parameter of SetSpeedValue must be of int or float type, not " + str(type(value))) if value < min(self._intervals): raise IndexError("value parameter of SetSpeedValue is smaller than the minimum element in the points (intervals) list") elif value > max(self._intervals): raise IndexError("value parameter of SetSpeedValue Greater Than Maximum Element In Points List") self._speedvalue = value self._speedStr = str(int(value * self._ValueMultiplier)) try: self.UpdateDrawing() except: pass def GetSpeedValue(self): """ Gets The Current Value For SpeedMeter. @rtype: L{int} or L{float} """ return self._speedvalue def SetAngleRange(self, start=0, end=pi): """ Sets The Range Of Existence For SpeedMeter. This Values *Must* Be Specifiend In RADIANS. @param start: the start angle (default 0) @type start: L{int} in radians @param end: the end angle (default pi) @type end: L{int} in radians """ self._anglerange = [start, end] def GetAngleRange(self): """ Gets The Range Of Existence For SpeedMeter. The Returned Values Are In RADIANS. @rtype: L{list} of L{int}s (radians) like [start, end] """ return self._anglerange def SetIntervalColours(self, colours=None): """ Sets The Colours For The Intervals. Every Intervals (Circle Sector) Should Have A Colour. Expects a list of L{wx.Colour}s of the same length as the number of intervals. @param colours: list of colours to use for intervals @type colours: L{list} of L{wx.Colour}s of same length as number of intervals """ if colours is None: if not hasattr(self, "_anglerange"): errstr = "\nERROR: Impossible To Set Interval Colours," errstr = errstr + " Please Define The Intervals Ranges Before." raise errstr return colours = [wx.WHITE]*len(self._intervals) else: if len(colours) != len(self._intervals) - 1: errstr = "\nERROR: Length Of Colour List Does Not Match Length" errstr = errstr + " Of Intervals Ranges List." print errstr raise errstr return self._intervalcolours = colours def GetIntervalColours(self): """ Gets The Colours For The Intervals. @rtype: L{list} of L{wx.Colour}s """ if hasattr(self, "_intervalcolours"): return self._intervalcolours else: raise "\nERROR: No Interval Colours Have Been Defined" def SetTicks(self, ticks=None): """ Sets The Ticks For SpeedMeter Intervals (Main Ticks String Values). Must be a list of strings, of the same length as the number of intervals. This should probably not be called from outside the class, unless you want to set the interval ticks to something weird (maybe a fuel meter using "1/4", "1/2", etc.). It is probably better to use the L{SetValueMultiplier}() function if you're dealing with linear integers. @param ticks: list of strings, of the same length as the number of intervals. @type ticks: L{list} of L{string}s """ if ticks is None: if not hasattr(self, "_anglerange"): errstr = "\nERROR: Impossible To Set Interval Ticks," errstr = errstr + " Please Define The Intervals Ranges Before." raise errstr return ticks = [] for values in self._intervals: ticks.append(str(values)) else: if len(ticks) != len(self._intervals): errstr = "\nERROR: Length Of Ticks List Does Not Match Length" errstr = errstr + " Of Intervals Ranges List." raise errstr return self._intervalticks = ticks def GetTicks(self): """ Gets The Ticks For SpeedMeter Intervals (Main Ticks String Values). @rtype: L{list} of L{string}s """ if hasattr(self, "_intervalticks"): return self._intervalticks else: raise "\nERROR: No Interval Ticks Have Been Defined" def SetTicksFont(self, font=None): """ Sets The Ticks Font. @param font: the font for the text (default 10pt, wx.Font(1, wx.SWISS, wx.NORMAL, wx.BOLD, False)) @type font: L{wx.Font} """ if font is None: self._originalfont = [wx.Font(10, wx.SWISS, wx.NORMAL, wx.BOLD, False)] self._originalsize = 10 else: self._originalfont = [font] self._originalsize = font.GetPointSize() def GetTicksFont(self): """ Gets The Ticks Font. @rtype: L{tuple} of (L{wx.Font}, L{float} size) """ return self._originalfont[:], self._originalsize def SetTicksColour(self, colour=None): """ Sets The Ticks Colour. @param colour @type colour: L{wx.Colour} """ if colour is None: colour = wx.BLUE self._tickscolour = colour def GetTicksColour(self): """ Gets The Ticks Colour. @rtype: L{wx.Colour} """ return self._tickscolour def SetSpeedBackground(self, colour=None): """ Sets The Background Colour Outside The SpeedMeter Control. @param colour @type colour: L{wx.Colour} """ if colour is None: colour = wx.SystemSettings_GetColour(0) self._speedbackground = colour def GetSpeedBackground(self): """ Gets The Background Colour Outside The SpeedMeter Control. @rtype: L{wx.Colour} """ return self._speedbackground def SetHandColour(self, colour=None): """ Sets The Hand (Arrow Indicator) Colour. @param colour @type colour: L{wx.Colour} """ if colour is None: colour = wx.RED self._handcolour = colour def GetHandColour(self): """ Gets The Hand (Arrow Indicator) Colour. @rtype: L{wx.Colour} """ return self._handcolour def SetArcColour(self, colour=None): """ Sets The External Arc Colour (Thicker Line). @param colour @type colour: L{wx.Colour} """ if colour is None: colour = wx.BLACK self._arccolour = colour def GetArcColour(self): """ Gets The External Arc Colour. @rtype: L{wx.Colour} """ return self._arccolour def SetShadowColour(self, colour=None): """ Sets The Hand's Shadow Colour. @param colour @type colour: L{wx.Colour} """ if colour is None: colour = wx.Colour(150, 150, 150) self._shadowcolour = colour def GetShadowColour(self): """ Gets The Hand's Shadow Colour. @rtype: L{wx.Colour} """ return self._shadowcolour def SetFillerColour(self, colour=None): """ Sets The Partial Filler Colour. A Circle Corona Near The Ticks Will Be Filled With This Colour, From The Starting Value To The Current Value Of SpeedMeter. @param colour: the colour @type colour: L{wx.Colour} """ if colour is None: colour = wx.Colour(255, 150, 50) self._fillercolour = colour def GetFillerColour(self): """ Gets The Partial Filler Colour. @rtype: L{wx.Colour} """ return self._fillercolour def SetDirection(self, direction=None): """ Sets The Direction Of Advancing SpeedMeter Value. Specifying "Advance" Will Move The Hand In Clock-Wise Direction (Like Normal Car Speed Control), While Using "Reverse" Will Move It CounterClock-Wise Direction. @param direction: direction of needle movement @type direction: L{string} "Advance" (default) or "Reverse" """ if direction is None: direction = "Advance" if direction not in ["Advance", "Reverse"]: raise '\nERROR: Direction Parameter Should Be One Of "Advance" Or "Reverse".' return self._direction = direction def GetDirection(self): """ Gets The Direction Of Advancing SpeedMeter Value. @rtype: L{string} "Advance" or "Reverse" """ return self._direction def SetNumberOfSecondaryTicks(self, ticknum=None): """ Sets The Number Of Secondary (Intermediate) Ticks. @param ticknum: number of secondary ticks (MUST be >= 1, default is 3) @type ticknum: L{int} """ if ticknum is None: ticknum = 3 if ticknum < 1: raise "\nERROR: Number Of Ticks Must Be Greater Than 1." return self._secondaryticks = ticknum def GetNumberOfSecondaryTicks(self): """ Gets The Number Of Secondary (Intermediate) Ticks. @rtype: L{int} """ return self._secondaryticks def SetMiddleText(self, text=None): """ Sets The Text To Be Drawn Near The Center Of SpeedMeter. @param text: the text to draw @type text: L{string} """ if text is None: text = "" self._middletext = text def GetMiddleText(self): """ Gets The Text To Be Drawn Near The Center Of SpeedMeter. @rtype: L{string} """ return self._middletext def SetMiddleTextFont(self, font=None): """ Sets The Font For The Text In The Middle. @param font: the font for the text (default 10pt, wx.Font(1, wx.SWISS, wx.NORMAL, wx.BOLD, False)) @type font: L{wx.Font} """ if font is None: self._middletextfont = wx.Font(1, wx.SWISS, wx.NORMAL, wx.BOLD, False) self._middletextsize = 10.0 self._middletextfont.SetPointSize(self._middletextsize) else: self._middletextfont = font self._middletextsize = font.GetPointSize() self._middletextfont.SetPointSize(self._middletextsize) def GetMiddleTextFont(self): """ Gets The Font For The Text In The Middle. @rtype: L{tuple} of (L{wx.Font}, L{float} size) """ return self._middletextfont, self._middletextsize def SetMiddleTextColour(self, colour=None): """ Sets The Colour For The Text In The Middle. @param colour: the colour for the text @type colour: L{wx.Colour} """ if colour is None: colour = wx.BLUE self._middlecolour = colour def GetMiddleTextColour(self): """ Gets The Colour For The Text In The Middle. @rtype: L{wx.Colour} """ return self._middlecolour def SetBottomText(self, text=None): """ Sets The Text To Be Drawn Near The Bottom Of SpeedMeter. Can have up to one newline. This should be used for a label, such as the gauge type and scale (i.e. "RPM x1000) Newlines are understood. The text is drawn as two separate lines, and this is taken into account when positioning the LED digits if used. @param text: the text to draw @type text: L{string} """ if text is None: text = "" self._bottomtext = text def GetBottomText(self): """ Gets The Text To Be Drawn Near The Bottom Of SpeedMeter (label) @rtype: L{string} """ return self._bottomtext def SetBottomTextFont(self, font=None): """ Sets The Font For The Text In The Bottom. @param font: the font for the text (default 10pt, wx.Font(1, wx.SWISS, wx.NORMAL, wx.BOLD, False)) @type font: L{wx.Font} """ if font is None: self._bottomtextfont = wx.Font(1, wx.SWISS, wx.NORMAL, wx.BOLD, False) self._bottomtextsize = 10.0 self._bottomtextfont.SetPointSize(self._bottomtextsize) else: self._bottomtextfont = font self._bottomtextsize = font.GetPointSize() self._bottomtextfont.SetPointSize(self._bottomtextsize) def GetBottomTextFont(self): """ Gets The Font For The Text In The Bottom. @rtype: L{tuple} of (L{wx.Font}, L{float} size) """ return self._bottomtextfont, self._bottomtextsize def SetBottomTextColour(self, colour=None): """ Sets The Colour For The Text In The Bottom of the gauge (label). @param colour: the colour for the text @type colour: L{wx.Colour} """ if colour is None: colour = wx.BLUE self._bottomcolour = colour def SetLEDColour(self, colour=None): """ Sets The Colour For Bottom LED digits. @param colour: the colour for the digits @type colour: L{wx.Colour} """ if colour is None: colour = wx.GREEN self._ledcolour = colour def GetLEDColour(self): """ Gets The Colour For The LED Digits @rtype: L{wx.Colour} """ return self._ledcolour def GetBottomTextColour(self): """ Gets The Colour For The Text In The Bottom @rtype: L{wx.Colour} """ return self._bottomcolour def SetMiddleIcon(self, icon): """ Sets The Icon To Be Drawn Near The Center Of SpeedMeter. @param icon: The icon to be drawn @type icon: L{wx.Icon} """ if icon.Ok(): self._middleicon = icon else: # edited 2010-06-13 by jantman to get rid of error - was raising an error as a string print "\nERROR: Invalid Icon Passed To SpeedMeter." return False def GetMiddleIcon(self): """ Gets The Icon To Be Drawn Near The Center Of SpeedMeter. @rtype: L{wx.Icon} """ return self._middleicon def __GetMiddleIconDimens(self): """ USED INTERNALLY ONLY - Undocumented. Do NOT call from outside this class. """ return self._middleicon.GetWidth(), self._middleicon.GetHeight() def __CircleCoords(self, radius, angle, centerX, centerY): """ USED INTERNALLY ONLY - Undocumented. Do NOT call from outside this class. Method to get the coordinates of the circle. """ x = radius*cos(angle) + centerX y = radius*sin(angle) + centerY return x, y def __GetIntersection(self, current, intervals): """ USED INTERNALLY ONLY - Undocumented. Do NOT call from outside this class. """ if self.GetDirection() == "Reverse": interval = intervals[:] interval.reverse() else: interval = intervals indexes = range(len(intervals)) try: intersection = [ind for ind in indexes if interval[ind] <= current <= interval[ind+1]] except: if self.GetDirection() == "Reverse": intersection = [len(intervals) - 1] else: intersection = [0] return intersection[0] def SetFirstGradientColour(self, colour=None): """ Sets The First Gradient Colour (Near The Ticks). @param colour: Color for the second gradient @type colour: L{wx.Colour} """ if colour is None: colour = wx.Colour(145, 220, 200) self._firstgradientcolour = colour def GetFirstGradientColour(self): """ Gets The First Gradient Colour (Near The Ticks). @return: first gradient color @rtype: L{wx.Colour} """ return self._firstgradientcolour def SetSecondGradientColour(self, colour=None): """ Sets The Second Gradient Colour (Near The Center). @param colour: Color for the second gradient @type colour: L{wx.Colour} """ if colour is None: colour = wx.WHITE self._secondgradientcolour = colour def GetSecondGradientColour(self): """ Gets The First Gradient Colour (Near The Center). @return: second gradient color @rtype: L{wx.Colour} """ return self._secondgradientcolour def SetHandStyle(self, style=None): """ Sets The Style For The Hand (Arrow Indicator). By Specifying "Hand" SpeedMeter Will Draw A Polygon That Simulates The Car Speed Control Indicator. Using "Arrow" Will Force SpeedMeter To Draw A Simple Arrow. @param style: hand style, string, either "Arrow" or "Hand" @type style: L{string} """ if style is None: style = "Hand" if style not in ["Hand", "Arrow"]: raise '\nERROR: Hand Style Parameter Should Be One Of "Hand" Or "Arrow".' return self._handstyle = style def GetHandStyle(self): """ Gets The Style For The Hand (Arrow Indicator) @return: hand style, string either "Arrow" or "Hand" @rtype: L{string} """ return self._handstyle def DrawExternalArc(self, draw=True): """ Specify Wheter Or Not You Wish To Draw The External (Thicker) Arc. @param draw: Whether or not to draw the external arc.(default True) @type draw: L{boolean} """ self._drawarc = draw def DrawExternalCircle(self, draw=False): """ Specify Wheter Or Not You Wish To Draw The External (Thicker) Arc as a full circle. @param draw: boolean, whether or not to draw the full circle (default False) @type draw: L{boolean} """ self._drawfullarc = draw def OnMouseMotion(self, event): """ Handles The Mouse Events. Here Only Left Clicks/Drags Are Involved. Should SpeedMeter Have Something More? @todo: Do we even want this? What does it do? Seems like it would allow the user to change the value or something, which is BAD. """ mousex = event.GetX() mousey = event.GetY() if event.Leaving(): return pos = self.GetClientSize() size = self.GetPosition() centerX = self.CenterX centerY = self.CenterY direction = self.GetDirection() if event.LeftIsDown(): angle = atan2(float(mousey) - centerY, centerX - float(mousex)) + pi - self.EndAngle if angle >= 2*pi: angle = angle - 2*pi if direction == "Advance": currentvalue = (self.StartAngle - self.EndAngle - angle)*float(self.Span)/(self.StartAngle - self.EndAngle) + self.StartValue else: currentvalue = (angle)*float(self.Span)/(self.StartAngle - self.EndAngle) + self.StartValue if currentvalue >= self.StartValue and currentvalue <= self.EndValue: self.SetSpeedValue(currentvalue) event.Skip() def GetSpeedStyle(self): """ Returns A List Of Strings And A List Of Integers Containing The Styles. """ stringstyle = [] integerstyle = [] if self._extrastyle & SM_ROTATE_TEXT: stringstyle.append("SM_ROTATE_TEXT") integerstyle.append(SM_ROTATE_TEXT) if self._extrastyle & SM_DRAW_SECTORS: stringstyle.append("SM_DRAW_SECTORS") integerstyle.append(SM_DRAW_SECTORS) if self._extrastyle & SM_DRAW_PARTIAL_SECTORS: stringstyle.append("SM_DRAW_PARTIAL_SECTORS") integerstyle.append(SM_DRAW_PARTIAL_SECTORS) if self._extrastyle & SM_DRAW_HAND: stringstyle.append("SM_DRAW_HAND") integerstyle.append(SM_DRAW_HAND) if self._extrastyle & SM_DRAW_SHADOW: stringstyle.append("SM_DRAW_SHADOW") integerstyle.append(SM_DRAW_SHADOW) if self._extrastyle & SM_DRAW_PARTIAL_FILLER: stringstyle.append("SM_DRAW_PARTIAL_FILLER") integerstyle.append(SM_DRAW_PARTIAL_FILLER) if self._extrastyle & SM_DRAW_SECONDARY_TICKS: stringstyle.append("SM_DRAW_SECONDARY_TICKS") integerstyle.append(SM_DRAW_SECONDARY_TICKS) if self._extrastyle & SM_DRAW_MIDDLE_TEXT: stringstyle.append("SM_DRAW_MIDDLE_TEXT") integerstyle.append(SM_DRAW_MIDDLE_TEXT) if self._extrastyle & SM_DRAW_BOTTOM_TEXT: stringstyle.append("SM_DRAW_BOTTOM_TEXT") integerstyle.append(SM_DRAW_BOTTOM_TEXT) if self._extrastyle & SM_DRAW_BOTTOM_LED: stringstyle.append("SM_DRAW_BOTTOM_LED") integerstyle.append(SM_DRAW_BOTTOM_LED) if self._extrastyle & SM_DRAW_MIDDLE_ICON: stringstyle.append("SM_DRAW_MIDDLE_ICON") integerstyle.append(SM_DRAW_MIDDLE_ICON) if self._extrastyle & SM_DRAW_GRADIENT: stringstyle.append("SM_DRAW_GRADIENT") integerstyle.append(SM_DRAW_GRADIENT) if self._extrastyle & SM_DRAW_FANCY_TICKS: stringstyle.append("SM_DRAW_FANCY_TICKS") integerstyle.append(SM_DRAW_FANCY_TICKS) return stringstyle, integerstyle # below here is stuff added by jantman for the LED control def SetDrawFaded(self, DrawFaded=None, Redraw=False): """ Set the option to draw the faded (non-used) LED segments. @param DrawFaded: Whether or not to draw the unused segments. @type DrawFaded: L{boolean} @param Redraw: Whether or not to redraw NOW. @type Redraw: L{boolean} """ if DrawFaded is None: self._DrawFaded = DrawFaded if DrawFaded != self._DrawFaded: self._DrawFaded = DrawFaded if Redraw: Refresh(False) def _InitLEDInternals(self): """ Sets up the class variables for the LED control stuff. Should ONLY be called INTERNALLY. """ self._LineMargin = None self._LineLength = None self._LineWidth = None self._DigitMargin = None self._LeftStartPos = None def _DrawLED(self, dc, CenterX): """ Handles all of the drawing for the LED control, just an extension to the original SpeedMeter Draw() method. Should ONLY be called INTERNALLY. @todo: this is hard coded to ignore the background - doesn't draw it. If you want something different, you need to change it. @param dc: the DC @type dc: L{dc} @param CenterX: The X coordinate of the center of the gauge, as found in the original SpeedMeter code. @type CenterX: L{int} """ self._RecalcInternals() # Iterate each digit in the value, and draw. if self.DEBUG is True: print "===Drawing LED Value String: " + self._speedStr for i in range(len(self._speedStr)): c = self._speedStr[i] if self.DEBUG: print "Digit Number: " + str(i) print "Drawing Digit: " + c # Draw faded lines if wanted. if self._DrawFaded and (c != '.'): self._DrawDigit(dc, DIGITALL, i) # Draw the digits. if c == '0': self._DrawDigit(dc, DIGIT0, i) elif c == '1': self._DrawDigit(dc, DIGIT1, i) elif c == '2': self._DrawDigit(dc, DIGIT2, i) elif c == '3': self._DrawDigit(dc, DIGIT3, i) elif c == '4': self._DrawDigit(dc, DIGIT4, i) elif c == '5': self._DrawDigit(dc, DIGIT5, i) elif c == '6': self._DrawDigit(dc, DIGIT6, i) elif c == '7': self._DrawDigit(dc, DIGIT7, i) elif c == '8': self._DrawDigit(dc, DIGIT8, i) elif c == '9': self._DrawDigit(dc, DIGIT9, i) elif c == '-': self._DrawDigit(dc, DASH, i) elif c == '.': self._DrawDigit(dc, DECIMALSIGN, (i-1)) elif c == ' ': # skip this pass else: print "Error: Undefined Digit Value: " + c def _DrawDigit(self, dc, Digit, Column): """ Internal code to actually draw the lines that make up a single digit. Should be called INTERNALLY ONLY. @param dc: The DC. @type dc: L{dc} @param Digit: The constant (mask) defining the lines of the specified digit. @type Digit: L{int} @param Column: the number of the column that the digit should be in @type Column: L{int} """ LineColor = self.GetForegroundColour() if Digit == DIGITALL: R = LineColor.Red() / 16 G = LineColor.Green() / 16 B = LineColor.Blue() / 16 LineColor = wx.Colour(R, G, B) XPos = self._LeftStartPos + (Column * (self._LineLength + self._DigitMargin)) # Create a pen and draw the lines. Pen = wx.Pen(LineColor, self._LineWidth, wx.SOLID) dc.SetPen(Pen) if Digit & LINE1: dc.DrawLine(XPos + self._LineMargin*2, self._LineMargin + self.LEDyOffset, XPos + self._LineLength + self._LineMargin*2, self._LineMargin + self.LEDyOffset) if self.DEBUG: print "Line1" if Digit & LINE2: dc.DrawLine(XPos + self._LineLength + self._LineMargin*3, self._LineMargin*2 + self.LEDyOffset, XPos + self._LineLength + self._LineMargin*3, self._LineLength + (self._LineMargin*2) + self.LEDyOffset) if self.DEBUG: print "Line2" if Digit & LINE3: dc.DrawLine(XPos + self._LineLength + self._LineMargin*3, self._LineLength + (self._LineMargin*4) + self.LEDyOffset, XPos + self._LineLength + self._LineMargin*3, self._LineLength*2 + (self._LineMargin*4) + self.LEDyOffset) if self.DEBUG: print "Line3" if Digit & LINE4: dc.DrawLine(XPos + self._LineMargin*2, self._LineLength*2 + (self._LineMargin*5) + self.LEDyOffset, XPos + self._LineLength + self._LineMargin*2, self._LineLength*2 + (self._LineMargin*5) + self.LEDyOffset) if self.DEBUG: print "Line4" if Digit & LINE5: dc.DrawLine(XPos + self._LineMargin, self._LineLength + (self._LineMargin*4) + self.LEDyOffset, XPos + self._LineMargin, self._LineLength*2 + (self._LineMargin*4) + self.LEDyOffset) if self.DEBUG: print "Line5" if Digit & LINE6: dc.DrawLine(XPos + self._LineMargin, self._LineMargin*2 + self.LEDyOffset, XPos + self._LineMargin, self._LineLength + (self._LineMargin*2) + self.LEDyOffset) if self.DEBUG: print "Line6" if Digit & LINE7: dc.DrawLine(XPos + self._LineMargin*2, self._LineLength + (self._LineMargin*3) + self.LEDyOffset, XPos + self._LineMargin*2 + self._LineLength, self._LineLength + (self._LineMargin*3) + self.LEDyOffset) if self.DEBUG: print "Line7" if Digit & DECIMALSIGN: dc.DrawLine(XPos + self._LineLength + self._LineMargin*4, self._LineLength*2 + (self._LineMargin*5) + self.LEDyOffset, XPos + self._LineLength + self._LineMargin*4, self._LineLength*2 + (self._LineMargin*5) + self.LEDyOffset) if self.DEBUG: print "Line DecimalSign" #Dc.SetPen(wxNullPen); def _RecalcInternals(self): """ Recalculates all variables controlling the placement and gemoetry of the digits. Bases it off of the Frame size. This should calculate everything like the gauge center and work off of that. Should be called INTERNALLY ONLY. Dimensions of LED segments Size of character is based on the HEIGH of the widget, NOT the width. Segment height is calculated as follows: Each segment is m_LineLength pixels long. There is m_LineMargin pixels at the top and bottom of each line segment There is m_LineMargin pixels at the top and bottom of each digit Therefore, the heigth of each character is: m_LineMargin : Top digit boarder m_LineMargin+m_LineLength+m_LineMargin : Top half of segment m_LineMargin+m_LineLength+m_LineMargin : Bottom half of segment m_LineMargin : Bottom digit boarder ---------------------- m_LineMargin*6 + m_LineLength*2 == Total height of digit. Therefore, (m_LineMargin*6 + m_LineLength*2) must equal Height Spacing between characters can then be calculated as follows: m_LineMargin : before the digit, m_LineMargin+m_LineLength+m_LineMargin : for the digit width m_LineMargin : after the digit = m_LineMargin*4 + m_LineLength """ # the size params for just the LED area itself size = self.GetClientSize() LEDHeight = int(size.y / 7) # based off of height of 30 in a 214px high client Height = LEDHeight LEDWidth = int(size.x / 2.4) # based off of width of 120 in a 290px wide client ClientWidth = size.x self.LEDyOffset = self.bottomTextBottom if (Height * 0.075) < 1: self._LineMargin = 1 else: self._LineMargin = int(Height * 0.075) if (Height * 0.275) < 1: self._LineLength = 1 else: self._LineLength = int(Height * 0.275) self._LineWidth = self._LineMargin self._DigitMargin = self._LineMargin * 4 # Count the number of characters in the string; '.' characters are not # included because they do not take up space in the display count = 0; for char in self._speedStr: if char != '.': count = count + 1 ValueWidth = (self._LineLength + self._DigitMargin) * count if self._Alignment == gizmos.LED_ALIGN_LEFT: self._LeftStartPos = self._LineMargin + LeftEdge elif self._Alignment == gizmos.LED_ALIGN_RIGHT: self._LeftStartPos = ClientWidth - ValueWidth - self._LineMargin + LeftEdge else: # self._Alignment == gizmos.LED_ALIGN_CENTER: # centered is the default self._LeftStartPos = (ClientWidth /2 ) - (ValueWidth / 2) def SetLEDAlignment(self, Alignment=None, Redraw=False): """ Sets LED digit alignment. @param Alignment - the alignment of the LED digits - valid values are L{gizmos.LED_ALIGN_LEFT}, L{gizmos.LED_ALIGN_RIGHT}, L{gizmos.LED_ALIGN_CENTER} (center is default). @type Alignment: wxLEDValueAlign @param Redraw: Whether or not to redraw NOW. @type Redraw: L{boolean} """ if Alignment is None: self._Alignment = Alignment if Alignment != self._Alignment: self._Alignment = Alignment if Redraw: try: self.UpdateDrawing() except: pass def SetDrawFaded(self, DrawFaded=None, Redraw=False): """ Whether or not to draw the unused line segments. If true, draws them faded. @param DrawFaded: Whether or not to draw the faded segments. (Default False) @type DrawFaded: L{boolean} @param Redraw: Whether or not to redraw NOW. @type Redraw: L{boolean} """ if DrawFaded is None: self._DrawFaded = DrawFaded if DrawFaded != self._DrawFaded: self._DrawFaded = DrawFaded if Redraw: Refresh(False) def SetValueMultiplier(self, multiplier=1): """ Sets the value multiplier. Values set with SetValue() will be multiplied by this amount before being displayed on the LED control. @param multiplier: the value multiplier @type multiplier: L{int} or L{float} @todo: re-do all this by setting a ValueScale (maybe at create time) and using this scale to determine the gauge scale, also divide values by it before feeding into the meter code itself (i.e. LED will show value as passed with SetValue()). """ self._ValueMultiplier = multiplier
"""misc_endpoints.py Classes representing API endpoints that don't subclass JSSObject """ from __future__ import print_function from __future__ import absolute_import import mimetypes import os import sys from xml.etree import ElementTree from .exceptions import MethodNotAllowedError, PostError from .tools import error_handler __all__ = ('CommandFlush', 'FileUpload', 'LogFlush') if sys.version_info.major == 3: basestring = str class CommandFlush(object): _endpoint_path = "commandflush" can_get = False can_put = False can_post = False def __init__(self, jss): """Initialize a new CommandFlush Args: jss: JSS object. """ self.jss = jss @property def url(self): """Return the path subcomponent of the url to this object.""" return self._endpoint_path def command_flush_with_xml(self, data): """Flush commands for devices with a supplied xml string. From the Casper API docs: Status and devices specified in an XML file. Id lists may be specified for <computers>, <computer_groups>, <mobile_devices>, <mobile_device_groups>. Sample file: <commandflush> <status>Pending+Failed</status> <mobile_devices> <mobile_device> <id>1</id> </mobile_device> <mobile_device> <id>2</id> </mobile_device> </mobile_devices> </commandflush> Args: data (string): XML string following the above structure or an ElementTree/Element. Raises: DeleteError if provided url_path has a >= 400 response. """ if not isinstance(data, basestring): data = ElementTree.tostring(data, encoding='UTF-8') self.jss.delete(self.url, data) def command_flush_for(self, id_type, command_id, status): """Flush commands for an individual device. Args: id_type (str): One of 'computers', 'computergroups', 'mobiledevices', or 'mobiledevicegroups'. id_value (str, int, list): ID value(s) for the devices to flush. More than one device should be passed as IDs in a list or tuple. status (str): One of 'Pending', 'Failed', 'Pending+Failed'. Raises: DeleteError if provided url_path has a >= 400 response. """ id_types = ('computers', 'computergroups', 'mobiledevices', 'mobiledevicegroups') status_types = ('Pending', 'Failed', 'Pending+Failed') if id_type not in id_types or status not in status_types: raise ValueError("Invalid arguments.") if isinstance(command_id, list): command_id = ",".join(str(item) for item in command_id) flush_url = "{}/{}/id/{}/status/{}".format( self.url, id_type, command_id, status) self.jss.delete(flush_url) class FileUpload(object): """FileUploads are a special case in the API. They allow you to add file resources to a number of objects on the JSS. To use, instantiate a new FileUpload object, then use the save() method to upload. Once the upload has been posted you may only interact with it through the web interface. You cannot list/get it or delete it through the API. However, you can reuse the FileUpload object if you wish, by changing the parameters, and issuing another save(). """ _endpoint_path = "fileuploads" allowed_kwargs = ('subset',) def __init__(self, j, resource_type, id_type, _id, resource): """Prepare a new FileUpload. Args: j: A JSS object to POST the upload to. resource_type: String. Acceptable Values: Attachments: computers mobiledevices enrollmentprofiles peripherals mobiledeviceenrollmentprofiles Icons: policies ebooks mobiledeviceapplicationsicon Mobile Device Application: mobiledeviceapplicationsipa Disk Encryption diskencryptionconfigurations diskencryptions (synonymous) PPD printers id_type: String of desired ID type: id name _id: Int or String referencing the identity value of the resource to add the FileUpload to. resource: String path to the file to upload. """ resource_types = ["computers", "mobiledevices", "enrollmentprofiles", "peripherals", "mobiledeviceenrollmentprofiles", "policies", "ebooks", "mobiledeviceapplicationsicon", "mobiledeviceapplicationsipa", "diskencryptionconfigurations", "printers"] id_types = ["id", "name"] self.jss = j # Do some basic error checking on parameters. if resource_type in resource_types: self.resource_type = resource_type else: raise TypeError( "resource_type must be one of: %s" % ', '.join(resource_types)) if id_type in id_types: self.id_type = id_type else: raise TypeError("id_type must be one of: %s" % ', '.join(id_types)) self._id = str(_id) basename = os.path.basename(resource) content_type = mimetypes.guess_type(basename)[0] self.resource = {"name": (basename, open(resource, "rb"), content_type)} self._set_upload_url() def _set_upload_url(self): """Generate the full URL for a POST.""" # pylint: disable=protected-access self._upload_url = "/".join([ self.jss._url, self._endpoint_path, self.resource_type, self.id_type, str(self._id)]) # pylint: enable=protected-access def save(self): """POST the object to the JSS.""" try: response = self.jss.session.post( self._upload_url, files=self.resource) except PostError as error: if error.status_code == 409: raise PostError(error) else: raise MethodNotAllowedError(self.__class__.__name__) if response.status_code == 201: if self.jss.verbose: print("POST: Success") print(response.content) elif response.status_code >= 400: error_handler(PostError, response) class LogFlush(object): _endpoint_path = "logflush" def __init__(self, jss): """Initialize a new LogFlush Args: jss: JSS object. """ self.jss = jss @property def url(self): """Return the path subcomponent of the url to this object.""" return self._endpoint_path def log_flush_with_xml(self, data): """Flush logs for devices with a supplied xml string. From the Casper API docs: log, log_id, interval, and devices specified in an XML file. Sample file: <logflush> <log>policy</log> <log_id>2</log_id> <interval>THREE MONTHS</interval> <computers> <computer> <id>1</id> </computer> <computer> <id>2</id> </computer> </computers> </logflush> Args: data (string): XML string following the above structure or an ElementTree/Element. Elements: logflush (root) log (Unknown; "policy" is the only one listed in docs). log_id: Log ID value. interval: Combination of "Zero", "One", "Two", "Three", "Six", and "Day", "Week", "Month", "Year". e.g. ("Three+Months") Please note: The documentation for this specifies the singular form (e.g. "Month"), and plural ("Months") at different times, and further the construction is listed as "THREE MONTHS" elsewhere. Limited testing indicates that pluralization does not matter, nor does capitalization. The "+" seems optional as well. Please test! Device Arrays: Again, acceptable values are not listed in the docs, aside from the example ("computers"). Presumably "mobiledevices", and possibly "computergroups" and "mobiledevicegroups" work. Raises: DeleteError if provided url_path has a >= 400 response. """ if not isinstance(data, basestring): data = ElementTree.tostring(data, encoding='UTF-8') self.jss.delete(self.url, data) def log_flush_for_interval(self, log_type, interval): """Flush logs for an interval of time. Args: log_type (str): Only documented type is "policies". This will be applied by default if nothing is passed. interval (str): Combination of "Zero", "One", "Two", "Three", "Six", and "Day", "Week", "Month", "Year". e.g. ("Three+Months") Please note: The documentation for this specifies the singular form (e.g. "Month"), and plural ("Months") at different times, and further the construction is listed as "THREE MONTHS" elsewhere. Limited testing indicates that pluralization does not matter, nor does capitalization. Please test! No validation is performed on this prior to the request being made. Raises: DeleteError if provided url_path has a >= 400 response. """ if not log_type: log_type = "policies" # The XML for the /logflush basic endpoint allows spaces # instead of "+", so do a replace here just in case. interval = interval.replace(" ", "+") flush_url = "{}/{}/interval/{}".format( self.url, log_type, interval) self.jss.delete(flush_url) def log_flush_for_obj_for_interval(self, log_type, obj_id, interval): """Flush logs for an interval of time for a specific object. Please note, log_type is a variable according to the API docs, but acceptable values are not listed. Only "policies" is demonstrated as an acceptable value. Args: log_type (str): Only documented type is "policies". This will be applied by default if nothing is passed. obj_id (str or int): ID of the object to have logs flushed. interval (str): Combination of "Zero", "One", "Two", "Three", "Six", and "Day", "Week", "Month", "Year". e.g. ("Three+Months") Please note: The documentation for this specifies the singular form (e.g. "Month"), and plural ("Months") at different times, and further the construction is listed as "THREE MONTHS" elsewhere. Limited testing indicates that pluralization does not matter, nor does capitalization. Please test! No validation is performed on this prior to the request being made. Raises: DeleteError if provided url_path has a >= 400 response. """ if not log_type: log_type = "policies" # The XML for the /logflush basic endpoint allows spaces # instead of "+", so do a replace here just in case. interval = interval.replace(" ", "+") flush_url = "{}/{}/id/{}/interval/{}".format( self.url, log_type, obj_id, interval) self.jss.delete(flush_url)
""" Copyright (C) 2015 Louis Dijkstra This file is part of error-model-aligner This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ from __future__ import print_function, division from optparse import OptionParser import os import sys from scipy.stats import norm import numpy as np from scipy.optimize import minimize import math __author__ = "Louis Dijkstra" usage = """%prog [options] <.insert-sizes> <.insert-sizes> File containing the insert size observations when there is no indel Outputs the mean and standard deviation of the null model (i.e., a discrete approximation of a Normal distribution that does not allow for negative values) The file .insert-sizes must be orginazed in two columns (tab seperated): x_1 c_1 x_2 c_2 ... ... x_n c_n where x_1 is the minimal insert size observed x_n is the maximum value found. (Note: x_{i+1} = x_i + 1). c_i is the count for x_i. """ def normalizationFactor(mu, sigma): """Returns the normalization factor given mean mu and STD sigma""" return 1.0 / (1.0 - norm.cdf((-mu - 0.5)/sigma)) def f(isize, mu, sigma): p = norm.cdf((isize + 0.5 - mu)/sigma) - norm.cdf((isize - 0.5 - mu)/sigma) if p < sys.float_info.min: return sys.float_info.min return p def loglikelihood(mu, sigma, isizes, counts, n): """Returns the loglikelihood of mu and sigma given the data (isizes, counts and n)""" l = n * math.log(normalizationFactor(mu, sigma)) for isize, count in zip(isizes, counts): l += count * math.log(f(isize, mu, sigma)) return l def aux_loglikelihood(var, isizes, counts, n): mu = var[0] sigma = var[1] return -1.0 * loglikelihood(mu, sigma, isizes, counts, n) def main(): parser = OptionParser(usage=usage) parser.add_option("-f", action="store", dest="maxfun", default=1000, type=int, help="Maximum number of function evaluations (Default = 1000) ") parser.add_option("-i", action="store", dest="maxiter", default=100, type=int, help="Maximum number of iterations (Default = 100) ") parser.add_option("-m", action="store", dest="mu_init", default=100.0, type=float, help="Initial guess for the mean (mu). (Default is 100) ") parser.add_option("-s", action="store", dest="sigma_init", default=10.0, type=float, help="Initial guess for the standard deviation (sigma). (Default is 10) ") parser.add_option("-v", action="store_true", dest="verbose", default=False, help="Verbose. Output of the optimizer is printed. ") (options, args) = parser.parse_args() if (len(args)!=1): parser.print_help() return 1 isizes = [] # insert sizes that were observed counts = [] # number of times these insert sizes were observed for line in open(args[0], 'r'): values = map(int, line.split()) isizes.append(values[0]) counts.append(values[1]) isizes = np.array(isizes) counts = np.array(counts) n = np.sum(counts) res = minimize ( aux_loglikelihood, [options.mu_init, options.sigma_init], args=[isizes, counts, n], method="L-BFGS-B", bounds=[(0, None), (0, None)], options={'disp': options.verbose, 'maxfun': options.maxfun, 'maxiter': options.maxiter}) print("\n*** RESULTS ***\n") print("estimated mean: %lf\t estimated STD: %lf\n"%(res.x[0], res.x[1])) print(res.message) if __name__ == '__main__': sys.exit(main())
from selenium.webdriver.common.by import By from pages.base import BasePage class FirefoxWhatsNew73Page(BasePage): URL_TEMPLATE = '/{locale}/firefox/73.0/whatsnew/all/{params}' _set_default_button_locator = (By.ID, 'set-as-default-button') @property def is_default_browser_button_displayed(self): return self.is_element_displayed(*self._set_default_button_locator)
from bedrock.mozorg.templatetags import misc, social_widgets
import os import time import json from contextlib import nested import mock from nose.tools import eq_, ok_, assert_raises from configman import ConfigurationManager from socorro.external.hbase import hbase_client from socorro.external.crashstorage_base import ( CrashIDNotFound, Redactor, MemoryDumpsMapping ) from socorro.external.hbase.crashstorage import HBaseCrashStorage from socorro.external.hbase.connection_context import \ HBaseConnectionContextPooled from socorro.lib.util import DotDict from socorro.unittest.config import commonconfig from socorro.database.transaction_executor import ( TransactionExecutorWithLimitedBackoff ) from socorro.unittest.testbase import TestCase class SomeThriftError(Exception): pass _run_integration_tests = os.environ.get('RUN_HBASE_INTEGRATION_TESTS', False) if _run_integration_tests in ('false', 'False', 'no', '0'): _run_integration_tests = False if not _run_integration_tests: import logging logging.warning("Skipping HBase integration tests") else: class TestIntegrationHBaseCrashStorage(TestCase): """ If you ever get this:: Traceback (most recent call last): ... socorro.external.hbase.hbase_client.FatalException: the connection is not viable. retries fail: Then try the following: /etc/init.d/hadoop-hbase-master restart /etc/init.d/hadoop-hbase-thrift restart Also, you can look in /var/log/hbase for clues. Still not working, try: hbase shell > describe 'crash_reports' and keep an eye on the logs. """ def tearDown(self): super(TestIntegrationHBaseCrashStorage, self).tearDown() self._truncate_hbase_table() def _truncate_hbase_table(self): connection = hbase_client.HBaseConnectionForCrashReports( commonconfig.hbaseHost.default, commonconfig.hbasePort.default, 100 ) for row in connection.merge_scan_with_prefix( 'crash_reports', '', ['ids:ooid']): index_row_key = row['_rowkey'] connection.client.deleteAllRow( 'crash_reports', index_row_key) # because of HBase's async nature, deleting can take time list(connection.iterator_for_all_legacy_to_be_processed()) def test_basic_hbase_crashstorage(self): mock_logging = mock.Mock() required_config = HBaseCrashStorage.required_config required_config.add_option('logger', default=mock_logging) config_manager = ConfigurationManager( [required_config], app_name='testapp', app_version='1.0', app_description='app description', values_source_list=[{ 'logger': mock_logging, 'hbase_timeout': 100, 'hbase_host': commonconfig.hbaseHost.default, 'hbase_port': commonconfig.hbasePort.default, }], argv_source=[] ) with config_manager.context() as config: crashstorage = HBaseCrashStorage(config) eq_(list(crashstorage.new_crashes()), []) crash_id = '86b58ff2-9708-487d-bfc4-9dac32121214' raw = ('{"name":"Peter", ' '"submitted_timestamp":"%d"}' % time.time()) fake_raw_dump_1 = 'peter is a swede' fake_raw_dump_2 = 'lars is a norseman' fake_raw_dump_3 = 'adrian is a frenchman' fake_dumps = MemoryDumpsMapping({ 'upload_file_minidump': fake_raw_dump_1, 'lars': fake_raw_dump_2, 'adrian': fake_raw_dump_3 }) crashstorage.save_raw_crash(json.loads(raw), fake_dumps, crash_id) assert config.logger.info.called assert config.logger.info.call_count > 1 msg_tmpl, msg_arg = config.logger.info.call_args_list[1][0] # ie logging.info(<template>, <arg>) msg = msg_tmpl % msg_arg ok_('saved' in msg) ok_(crash_id in msg) raw_crash = crashstorage.get_raw_crash(crash_id) assert isinstance(raw_crash, dict) eq_(raw_crash['name'], 'Peter') dump = crashstorage.get_raw_dump(crash_id) assert isinstance(dump, basestring) ok_('peter is a swede' in dump) dumps = crashstorage.get_raw_dumps(crash_id) assert isinstance(dumps, dict) ok_('upload_file_minidump' in dumps) ok_('lars' in dumps) ok_('adrian' in dumps) eq_(dumps['upload_file_minidump'], fake_dumps['upload_file_minidump']) eq_(dumps['lars'], fake_dumps['lars']) eq_(dumps['adrian'], fake_dumps['adrian']) # hasn't been processed yet assert_raises(CrashIDNotFound, crashstorage.get_processed, crash_id) pro = ('{"name":"Peter",' '"uuid":"86b58ff2-9708-487d-bfc4-9dac32121214", ' '"submitted_timestamp":"%d", ' '"completeddatetime": "%d"}' % (time.time(), time.time())) crashstorage.save_processed(json.loads(pro)) data = crashstorage.get_processed(crash_id) eq_(data['name'], u'Peter') hb_connection = crashstorage.hbaseConnectionPool.connection() ok_(hb_connection.transport.isOpen()) crashstorage.close() ok_(not hb_connection.transport.isOpen()) class TestHBaseCrashStorage(TestCase): def test_hbase_crashstorage_basic_error(self): mock_logging = mock.Mock() required_config = HBaseCrashStorage.get_required_config() required_config.add_option('logger', default=mock_logging) config_manager = ConfigurationManager( [required_config], app_name='testapp', app_version='1.0', app_description='app description', values_source_list=[{ 'logger': mock_logging, 'hbase_timeout': 100, 'hbase_host': commonconfig.hbaseHost.default, 'hbase_port': commonconfig.hbasePort.default, }], argv_source=[] ) with config_manager.context() as config: config.executor_identity = lambda: 'dwight' # bogus thread id hbaseclient_ = 'socorro.external.hbase.crashstorage.hbase_client' with mock.patch(hbaseclient_) as hclient: klass = hclient.HBaseConnectionForCrashReports def retry_raiser(*args, **kwargs): raise SomeThriftError('try again') klass.put_json_dump.side_effect = ValueError('crap!') crashstorage = HBaseCrashStorage(config) raw = ('{"name":"Peter", ' '"submitted_timestamp":"%d"}' % time.time()) # Note, we're not expect it to raise an error assert_raises(ValueError, crashstorage.save_raw_crash, json.loads(raw), raw, "abc123" ) #eq_(instance.put_json_dump.call_count, 3) def test_hbase_crashstorage_error_after_retries(self): cshbaseclient_ = 'socorro.external.hbase.crashstorage.hbase_client' cchbaseclient_ = \ 'socorro.external.hbase.connection_context.hbase_client' with nested(mock.patch(cshbaseclient_), mock.patch(cchbaseclient_)) as (cshclient, cchclient): fake_hbase_client_connection = mock.MagicMock() cshclient.HBaseConnectionForCrashReports.return_value = \ fake_hbase_client_connection fake_put_json_method = mock.MagicMock() cshclient.HBaseConnectionForCrashReports.put_json_dump = \ fake_put_json_method cchclient.HBaseConnectionForCrashReports.return_value = \ fake_hbase_client_connection fake_hbase_client_connection.hbaseThriftExceptions = \ (SomeThriftError,) fake_put_json_method.side_effect = SomeThriftError('try again') config = DotDict({ 'logger': mock.MagicMock(), 'hbase_timeout': 0, 'hbase_host': 'somehost', 'hbase_port': 9090, 'number_of_retries': 2, 'hbase_connection_pool_class': HBaseConnectionContextPooled, 'transaction_executor_class': TransactionExecutorWithLimitedBackoff, 'backoff_delays': [0, 0, 0], 'redactor_class': Redactor, 'forbidden_keys': Redactor.required_config.forbidden_keys.default, 'executor_identity': lambda: 'dwight' # bogus thread id }) crashstorage = HBaseCrashStorage(config) raw = ('{"name":"Peter", ' '"submitted_timestamp":"%d"}' % time.time()) assert_raises(SomeThriftError, crashstorage.save_raw_crash, json.loads(raw), raw, {} ) eq_(fake_put_json_method.call_count, 3) def test_hbase_crashstorage_success_after_retries(self): cshbaseclient_ = 'socorro.external.hbase.crashstorage.hbase_client' cchbaseclient_ = \ 'socorro.external.hbase.connection_context.hbase_client' with nested(mock.patch(cshbaseclient_), mock.patch(cchbaseclient_)) as (cshclient, cchclient): fake_hbase_client_connection = mock.MagicMock() cshclient.HBaseConnectionForCrashReports.return_value = \ fake_hbase_client_connection fake_put_json_method = mock.MagicMock() cshclient.HBaseConnectionForCrashReports.put_json_dump = \ fake_put_json_method cchclient.HBaseConnectionForCrashReports.return_value = \ fake_hbase_client_connection fake_hbase_client_connection.hbaseThriftExceptions = \ (SomeThriftError,) _attempts = [SomeThriftError, SomeThriftError] def retry_raiser_iterator(*args, **kwargs): try: raise _attempts.pop(0) except IndexError: return None fake_put_json_method.side_effect = retry_raiser_iterator config = DotDict({ 'logger': mock.MagicMock(), 'hbase_timeout': 0, 'hbase_host': 'somehost', 'hbase_port': 9090, 'number_of_retries': 2, 'hbase_connection_pool_class': HBaseConnectionContextPooled, 'transaction_executor_class': TransactionExecutorWithLimitedBackoff, 'backoff_delays': [0, 0, 0], 'redactor_class': Redactor, 'forbidden_keys': Redactor.required_config.forbidden_keys.default, 'executor_identity': lambda: 'dwight' # bogus thread id }) crashstorage = HBaseCrashStorage(config) raw = ('{"name":"Peter", ' '"submitted_timestamp":"%d"}' % time.time()) crashstorage.save_raw_crash(json.loads(raw), raw, "abc123") eq_(fake_put_json_method.call_count, 3) def test_hbase_crashstorage_puts_and_gets(self): mock_logging = mock.Mock() required_config = HBaseCrashStorage.get_required_config() required_config.add_option('logger', default=mock_logging) config_manager = ConfigurationManager( [required_config], app_name='testapp', app_version='1.0', app_description='app description', values_source_list=[{ 'logger': mock_logging, 'hbase_timeout': 100, 'hbase_host': commonconfig.hbaseHost.default, 'hbase_port': commonconfig.hbasePort.default, 'transaction_executor_class': TransactionExecutorWithLimitedBackoff, 'backoff_delays': [0, 0, 0], }], argv_source=[] ) with config_manager.context() as config: config.executor_identity = lambda: 'dwight' # bogus thread id hbaseclient_ = 'socorro.external.hbase.crashstorage.hbase_client' with mock.patch(hbaseclient_) as hclient: # test save_raw_crash raw_crash = { "name": "Peter", "email": "bogus@nowhere.org", "url": "http://embarassing.xxx", "submitted_timestamp": "2012-05-04T15:10:00", "user_id": "000-00-0000", } fake_binary_dump = "this a bogus binary dump" expected_raw_crash = raw_crash expected_dump = fake_binary_dump expected_dump_2 = fake_binary_dump + " number 2" # saves us from loooong lines klass = hclient.HBaseConnectionForCrashReports crashstorage = HBaseCrashStorage(config) crashstorage.save_raw_crash(raw_crash, fake_binary_dump, "abc123") eq_( klass.put_json_dump.call_count, 1 ) a = klass.put_json_dump.call_args eq_(len(a[0]), 4) #eq_(a[0][1], "abc123") eq_(a[0][2], expected_raw_crash) eq_(a[0][3], expected_dump) eq_(a[1], {'number_of_retries': 0}) # test save_processed processed_crash = { "name": "Peter", "uuid": "abc123", "email": "bogus@nowhere.org", "url": "http://embarassing.xxx", "user_id": "000-00-0000", } expected_processed_crash = { "name": "Peter", "uuid": "abc123", } expected_unredacted_processed_crash = { "name": "Peter", "uuid": "abc123", "email": "bogus@nowhere.org", "url": "http://embarassing.xxx", "user_id": "000-00-0000", } crashstorage = HBaseCrashStorage(config) crashstorage.save_processed(processed_crash) eq_(klass.put_processed_json.call_count, 1) a = klass.put_processed_json.call_args eq_(len(a[0]), 3) eq_(a[0][1], "abc123") eq_(a[0][2], expected_unredacted_processed_crash) eq_(a[1], {'number_of_retries': 0}) # test get_raw_crash m = mock.Mock(return_value=raw_crash) klass.get_json = m r = crashstorage.get_raw_crash("abc123") ok_(isinstance(r, DotDict)) a = klass.get_json.call_args eq_(len(a[0]), 2) eq_(a[0][1], "abc123") eq_(klass.get_json.call_count, 1) eq_(r, expected_raw_crash) # test get_raw_dump m = mock.Mock(return_value=fake_binary_dump) klass.get_dump = m r = crashstorage.get_raw_dump("abc123") a = klass.get_dump.call_args eq_(len(a[0]), 3) eq_(a[0][1], "abc123") eq_(klass.get_dump.call_count, 1) eq_(r, expected_dump) # test get_raw_dumps m = mock.Mock(return_value={'upload_file_minidump': fake_binary_dump}) klass.get_dumps = m r = crashstorage.get_raw_dumps("abc123") a = klass.get_dumps.call_args eq_(len(a[0]), 2) eq_(a[0][1], "abc123") eq_(klass.get_dumps.call_count, 1) eq_(r, {'upload_file_minidump': expected_dump}) # test get_raw_dumps 2 m = mock.Mock(return_value={'upload_file_minidump': fake_binary_dump, 'aux_1': expected_dump_2}) klass.get_dumps = m r = crashstorage.get_raw_dumps("abc123") a = klass.get_dumps.call_args eq_(len(a[0]), 2) eq_(a[0][1], "abc123") eq_(klass.get_dumps.call_count, 1) eq_(r, {'upload_file_minidump': fake_binary_dump, 'aux_1': expected_dump_2}) # test get_processed m = mock.Mock(return_value=expected_processed_crash) klass.get_processed_json = m r = crashstorage.get_processed("abc123") ok_(isinstance(r, DotDict)) a = klass.get_processed_json.call_args eq_(len(a[0]), 2) eq_(a[0][1], "abc123") eq_(klass.get_processed_json.call_count, 1) eq_(r, expected_processed_crash)
import scrapy class QuotesSpider(scrapy.Spider): name = 'quotes' allowed_domains = ['quotes.topscrape.com/tag/humor/'] start_urls = ['http://quotes.topscrape.com/tag/humor/'] def parse(self, response): for quote in response.css('div.quote'): yield { 'text': quote.css('span.text::text').extract_first(), 'author': quote.xpath('span/small/text()').extract_first(), } next_page = response.css('li.next a::attr("href")').extract_first() if next_page is not None: yield response.follow(next_page, self.parse)
import unittest import tempfile import os from os.path import join import zipfile from git import * from shutil import rmtree from gitbranchhealth.branchhealth import BranchHealthConfig class GitRepoTest(unittest.TestCase): def setUp(self): self.__mOriginTempDir = tempfile.mkdtemp(prefix='gitBranchHealthTest') self.assertTrue(os.path.exists(self.__mOriginTempDir)) # Create our origin first testRepoZipPath = join(self.__findTestDir(), 'testrepo.zip') zipFh = open(testRepoZipPath, 'rb') testRepoZip = zipfile.ZipFile(zipFh) for name in testRepoZip.namelist(): testRepoZip.extract(name, self.__mOriginTempDir) zipFh.close() self.__mOriginGitRepoPath = os.path.join(self.__mOriginTempDir, 'testrepo') originRepo = Repo(self.__mOriginGitRepoPath) self.__mTempDir = tempfile.mkdtemp(prefix='gitBranchHealthTest') os.mkdir(os.path.join(self.__mTempDir, 'testrepo')) self.assertTrue(os.path.exists(self.__mTempDir)) # Now create the local repo self.__mGitRepoPath = os.path.join(self.__mTempDir, 'testrepo') originRepo.clone(self.__mGitRepoPath) self.assertTrue(os.path.exists(self.__mGitRepoPath)) self.__mConfig = BranchHealthConfig(self.__mGitRepoPath) self.__trackAllRemoteBranches() def tearDown(self): pass # rmtree(self.__mTempDir) # rmtree(self.__mOriginTempDir) def getConfig(self): return self.__mConfig def getTempDir(self): return self.__mTempDir ## Private API ### def __trackAllRemoteBranches(self): repo = Repo(self.__mGitRepoPath) for remote in repo.remotes: for branch in remote.refs: localBranchName = branch.name.split('/')[-1] if localBranchName != 'master' and localBranchName != 'HEAD': repo.git.checkout(branch.name, b=localBranchName) repo.heads.master.checkout() def __findTestDir(self): # Find the file called 'testrepo.zip', starting at the current dir for (root, dirs, files) in os.walk('.'): if 'testrepo.zip' in files: return root
"""Module copies data module Variable DIRECTIONS""" from data import DIRECTIONS DIRECTIONS = DIRECTIONS[0:3]+('West',)
import bot import config if __name__ == '__main__': bot.init(config) bot.cancel_all()
import os MOZ_OBJDIR = 'obj-firefox' config = { 'default_actions': [ 'clobber', 'clone-tools', 'checkout-sources', #'setup-mock', 'build', #'upload-files', #'sendchange', 'check-test', 'valgrind-test', #'generate-build-stats', #'update', ], 'stage_platform': 'linux64-valgrind', 'publish_nightly_en_US_routes': False, 'build_type': 'valgrind', 'tooltool_manifest_src': "browser/config/tooltool-manifests/linux64/\ releng.manifest", 'platform_supports_post_upload_to_latest': False, 'enable_signing': False, 'enable_talos_sendchange': False, 'perfherder_extra_options': ['valgrind'], #### 64 bit build specific ##### 'env': { 'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'), 'MOZ_AUTOMATION': '1', 'DISPLAY': ':2', 'HG_SHARE_BASE_DIR': '/builds/hg-shared', 'MOZ_OBJDIR': 'obj-firefox', 'TINDERBOX_OUTPUT': '1', 'TOOLTOOL_CACHE': '/builds/tooltool_cache', 'TOOLTOOL_HOME': '/builds', 'MOZ_CRASHREPORTER_NO_REPORT': '1', 'CCACHE_DIR': '/builds/ccache', 'CCACHE_COMPRESS': '1', 'CCACHE_UMASK': '002', 'LC_ALL': 'C', ## 64 bit specific 'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\ /usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:/tools/python27/bin:\ /tools/python27-mercurial/bin:/home/cltbld/bin', }, 'src_mozconfig': 'browser/config/mozconfigs/linux64/valgrind', ####################### }
from django.db import migrations, models from django.conf import settings import editorsnotes.main.fields class Migration(migrations.Migration): dependencies = [ ('main', '0018_auto_20151019_1331'), ] operations = [ migrations.AlterField( model_name='document', name='created', field=models.DateTimeField(auto_now_add=True, verbose_name='The time this item was created.'), ), migrations.AlterField( model_name='document', name='creator', field=models.ForeignKey(related_name='created_document_set', editable=False, to=settings.AUTH_USER_MODEL, help_text='The user who created this item.'), ), migrations.AlterField( model_name='document', name='last_updated', field=models.DateTimeField(auto_now=True, verbose_name='The last time this item was edited.'), ), migrations.AlterField( model_name='document', name='last_updater', field=models.ForeignKey(related_name='last_to_update_document_set', editable=False, to=settings.AUTH_USER_MODEL, help_text='The last user to update this item.'), ), migrations.AlterField( model_name='featureditem', name='created', field=models.DateTimeField(auto_now_add=True, verbose_name='The time this item was created.'), ), migrations.AlterField( model_name='featureditem', name='creator', field=models.ForeignKey(related_name='created_featureditem_set', editable=False, to=settings.AUTH_USER_MODEL, help_text='The user who created this item.'), ), migrations.AlterField( model_name='note', name='assigned_users', field=models.ManyToManyField(help_text='Users who have been assigned to this note.', to=settings.AUTH_USER_MODEL, blank=True), ), migrations.AlterField( model_name='note', name='created', field=models.DateTimeField(auto_now_add=True, verbose_name='The time this item was created.'), ), migrations.AlterField( model_name='note', name='creator', field=models.ForeignKey(related_name='created_note_set', editable=False, to=settings.AUTH_USER_MODEL, help_text='The user who created this item.'), ), migrations.AlterField( model_name='note', name='is_private', field=models.BooleanField(default=False, help_text=b"If true, will only be be viewable to users who belong to the note's project."), ), migrations.AlterField( model_name='note', name='last_updated', field=models.DateTimeField(auto_now=True, verbose_name='The last time this item was edited.'), ), migrations.AlterField( model_name='note', name='last_updater', field=models.ForeignKey(related_name='last_to_update_note_set', editable=False, to=settings.AUTH_USER_MODEL, help_text='The last user to update this item.'), ), migrations.AlterField( model_name='note', name='license', field=models.ForeignKey(blank=True, to='licensing.License', help_text='The license under which this note is available.', null=True), ), migrations.AlterField( model_name='note', name='markup', field=models.TextField(help_text='Text for this item that uses CommonMark syntax, with Working Notes-specific additions for notes, topics, and documents.', null=True, blank=True), ), migrations.AlterField( model_name='note', name='markup_html', field=editorsnotes.main.fields.XHTMLField(help_text='The markup text for this item rendered into HTML.', null=True, editable=False, blank=True), ), migrations.AlterField( model_name='note', name='project', field=models.ForeignKey(related_name='notes', to='main.Project', help_text='The project to which this note belongs.'), ), migrations.AlterField( model_name='note', name='status', field=models.CharField(default='1', help_text='The status of the note. "Open" for outstanding, "Closed" for finished, or "Hibernating" for somewhere in between.', max_length=1, choices=[('0', 'closed'), ('1', 'open'), ('2', 'hibernating')]), ), migrations.AlterField( model_name='note', name='title', field=models.CharField(help_text='The title of the note.', max_length='80'), ), migrations.AlterField( model_name='project', name='image', field=models.ImageField(upload_to='project_images', null=True, verbose_name='An image representing this project.', blank=True), ), migrations.AlterField( model_name='project', name='markup', field=models.TextField(help_text='Text for this item that uses CommonMark syntax, with Working Notes-specific additions for notes, topics, and documents.', null=True, blank=True), ), migrations.AlterField( model_name='project', name='markup_html', field=editorsnotes.main.fields.XHTMLField(help_text='The markup text for this item rendered into HTML.', null=True, editable=False, blank=True), ), migrations.AlterField( model_name='project', name='name', field=models.CharField(help_text='The name of the project.', max_length='80'), ), migrations.AlterField( model_name='projectinvitation', name='created', field=models.DateTimeField(auto_now_add=True, verbose_name='The time this item was created.'), ), migrations.AlterField( model_name='projectinvitation', name='creator', field=models.ForeignKey(related_name='created_projectinvitation_set', editable=False, to=settings.AUTH_USER_MODEL, help_text='The user who created this item.'), ), migrations.AlterField( model_name='scan', name='created', field=models.DateTimeField(auto_now_add=True, verbose_name='The time this item was created.'), ), migrations.AlterField( model_name='scan', name='creator', field=models.ForeignKey(related_name='created_scan_set', editable=False, to=settings.AUTH_USER_MODEL, help_text='The user who created this item.'), ), migrations.AlterField( model_name='topic', name='created', field=models.DateTimeField(auto_now_add=True, verbose_name='The time this item was created.'), ), migrations.AlterField( model_name='topic', name='creator', field=models.ForeignKey(related_name='created_topic_set', editable=False, to=settings.AUTH_USER_MODEL, help_text='The user who created this item.'), ), migrations.AlterField( model_name='topic', name='last_updated', field=models.DateTimeField(auto_now=True, verbose_name='The last time this item was edited.'), ), migrations.AlterField( model_name='topic', name='last_updater', field=models.ForeignKey(related_name='last_to_update_topic_set', editable=False, to=settings.AUTH_USER_MODEL, help_text='The last user to update this item.'), ), migrations.AlterField( model_name='topic', name='markup', field=models.TextField(help_text='Text for this item that uses CommonMark syntax, with Working Notes-specific additions for notes, topics, and documents.', null=True, blank=True), ), migrations.AlterField( model_name='topic', name='markup_html', field=editorsnotes.main.fields.XHTMLField(help_text='The markup text for this item rendered into HTML.', null=True, editable=False, blank=True), ), migrations.AlterField( model_name='topicassignment', name='created', field=models.DateTimeField(auto_now_add=True, verbose_name='The time this item was created.'), ), migrations.AlterField( model_name='topicassignment', name='creator', field=models.ForeignKey(related_name='created_topicassignment_set', editable=False, to=settings.AUTH_USER_MODEL, help_text='The user who created this item.'), ), migrations.AlterField( model_name='transcript', name='created', field=models.DateTimeField(auto_now_add=True, verbose_name='The time this item was created.'), ), migrations.AlterField( model_name='transcript', name='creator', field=models.ForeignKey(related_name='created_transcript_set', editable=False, to=settings.AUTH_USER_MODEL, help_text='The user who created this item.'), ), migrations.AlterField( model_name='transcript', name='last_updated', field=models.DateTimeField(auto_now=True, verbose_name='The last time this item was edited.'), ), migrations.AlterField( model_name='transcript', name='last_updater', field=models.ForeignKey(related_name='last_to_update_transcript_set', editable=False, to=settings.AUTH_USER_MODEL, help_text='The last user to update this item.'), ), migrations.AlterField( model_name='transcript', name='markup', field=models.TextField(help_text='Text for this item that uses CommonMark syntax, with Working Notes-specific additions for notes, topics, and documents.', null=True, blank=True), ), migrations.AlterField( model_name='transcript', name='markup_html', field=editorsnotes.main.fields.XHTMLField(help_text='The markup text for this item rendered into HTML.', null=True, editable=False, blank=True), ), migrations.AlterField( model_name='user', name='profile', field=models.CharField(help_text='Profile text for a user.', max_length=1000, null=True, blank=True), ), ]
""" Login and logout views for the browseable API. Add these to your root URLconf if you're using the browseable API and your API requires authentication. The urls must be namespaced as 'rest_framework', and you should make sure your authentication settings include `SessionAuthentication`. urlpatterns = patterns('', ... url(r'^auth', include('rest_framework.urls', namespace='rest_framework')) ) """ from django.conf.urls.defaults import patterns, url template_name = {'template_name': 'rest_framework/login.html'} urlpatterns = patterns('django.contrib.auth.views', url(r'^login/$', 'login', template_name, name='login'), url(r'^logout/$', 'logout', template_name, name='logout'), )
__author__ = 'marijn' from setuptools import setup setup( name="goal_notifier", version="0.0.0", license="AGPL3", packages=['goal_notifier'], requires=[ "google-api-python-client", "pykka", "pydub", "pyopenssl", ], scripts=["goal_notifier"] )
from django.contrib.auth.models import User, Group from django.test import Client from rest_framework import status from app.models import Project, Task from app.models import Setting from app.models import Theme from webodm import settings from .classes import BootTestCase from django.core.exceptions import ValidationError class TestApp(BootTestCase): fixtures = ['test_processingnodes', ] def setUp(self): self.credentials = { 'username': 'testuser', 'password': 'test1234', 'email': 'test@mail.com'} # Create a test Group my_group, created = Group.objects.get_or_create(name='test_group') # Add user to test Group User.objects.get(pk=1).groups.add(my_group) def test_user_login(self): c = Client() # User points the browser to the landing page res = c.post('/', follow=True) # the user is not logged in self.assertFalse(res.context['user'].is_authenticated) # and is redirected to the login page self.assertRedirects(res, '/login/') # The login page is being rendered by the correct template self.assertTemplateUsed(res, 'registration/login.html') # asks the user to login using a set of valid credentials res = c.post('/login/', data=self.credentials, follow=True) # The system acknowledges him self.assertTrue(res.context['user'].is_authenticated) # and moves him at the dashboard self.assertTemplateUsed(res, 'app/dashboard.html') def test_views(self): c = Client() # Connecting to dashboard without auth redirects to / res = c.get('/dashboard/', follow=True) self.assertFalse(res.context['user'].is_authenticated) self.assertRedirects(res, '/login/?next=/dashboard/') res = c.get('/processingnode/1/', follow=True) self.assertRedirects(res, '/login/?next=/processingnode/1/') res = c.get('/map/project/1/', follow=True) self.assertRedirects(res, '/login/?next=/map/project/1/') res = c.get('/3d/project/1/task/1/', follow=True) self.assertRedirects(res, '/login/?next=/3d/project/1/task/1/') # Login c.post('/login/', data=self.credentials, follow=True) # We should have a project created from the dashboard self.assertTrue(Project.objects.count() >= 1) # Can access API page res = c.get('/api/') self.assertTrue(res.status_code == status.HTTP_200_OK) # We can access a processingnode view that exists res = c.get('/processingnode/1/') self.assertTrue(res.status_code == 200) self.assertTemplateUsed(res, 'app/processing_node.html') # We can access a processingnode that is offline # (and there's a warning message when we do that) res = c.get('/processingnode/2/') self.assertTrue(res.status_code == 200) self.assertTemplateUsed(res, 'app/processing_node.html') message = list(res.context['messages'])[0] self.assertEqual(message.tags, 'warning') self.assertTrue("offline" in message.message) res = c.get('/processingnode/9999/') self.assertTrue(res.status_code == 404) res = c.get('/processingnode/abc/') self.assertTrue(res.status_code == 404) # /map/ and /3d/ views user = User.objects.get(username="testuser") other_user = User.objects.get(username="testuser2") project = Project.objects.create(owner=user) task = Task.objects.create(project=project) other_project = Project.objects.create(owner=other_user) other_task = Task.objects.create(project=other_project) # Cannot access a project that we have no access to, or that does not exist for project_id in [other_project.id, 99999]: res = c.get('/map/project/{}/'.format(project_id)) self.assertTrue(res.status_code == status.HTTP_404_NOT_FOUND) # We can access a project that we have access to res = c.get('/map/project/{}/'.format(project.id)) self.assertTrue(res.status_code == status.HTTP_200_OK) # 3D views need project and task parameters res = c.get('/3d/project/{}/'.format(project.id)) self.assertTrue(res.status_code == status.HTTP_404_NOT_FOUND) # Cannot access a 3d view for a task we have no access to res = c.get('/3d/project/{}/task/{}/'.format(other_project.id, other_task.id)) self.assertTrue(res.status_code == status.HTTP_404_NOT_FOUND) # Can access 3d view for task we have access to res = c.get('/3d/project/{}/task/{}/'.format(project.id, task.id)) self.assertTrue(res.status_code == status.HTTP_200_OK) # Cannot access public URLs unless a task is shared def test_public_views(client, expectedStatus): res = client.get('/public/task/{}/map/'.format(task.id)) self.assertTrue(res.status_code == expectedStatus) res = client.get('/public/task/{}/3d/'.format(task.id)) self.assertTrue(res.status_code == expectedStatus) res = client.get('/public/task/{}/iframe/3d/'.format(task.id)) self.assertTrue(res.status_code == expectedStatus) res = client.get('/public/task/{}/iframe/map/'.format(task.id)) self.assertTrue(res.status_code == expectedStatus) res = client.get('/public/task/{}/json/'.format(task.id)) self.assertTrue(res.status_code == expectedStatus) test_public_views(c, status.HTTP_404_NOT_FOUND) # Share task task.public = True task.save() # Can now access URLs even as anonymous user ac = Client() test_public_views(ac, status.HTTP_200_OK) def test_admin_views(self): c = Client() c.login(username='testsuperuser', password='test1234') settingId = Setting.objects.all()[0].id # During tests, sometimes this is != 1 themeId = Theme.objects.all()[0].id # During tests, sometimes this is != 1 # Can access admin menu items admin_menu_items = ['/admin/app/setting/{}/change/'.format(settingId), '/admin/app/theme/{}/change/'.format(themeId), '/admin/', '/admin/app/plugin/', '/admin/auth/user/', '/admin/auth/group/', ] for url in admin_menu_items: res = c.get(url) self.assertEqual(res.status_code, status.HTTP_200_OK) # Cannot access dev tools (not in dev mode) settings.DEV = False self.assertEqual(c.get('/dev-tools/').status_code, status.HTTP_404_NOT_FOUND) settings.DEV = True # Can access in dev mode self.assertEqual(c.get('/dev-tools/').status_code, status.HTTP_200_OK) # Cannot access admin views as normal user c.logout() c.login(username='testuser', password='test1234') # Can never access dev tools as user, even in dev mode self.assertRedirects(c.get('/dev-tools/', follow=True), '/login/?next=/dev-tools/') settings.DEV = False for url in admin_menu_items: res = c.get(url, follow=True) self.assertRedirects(res, '/admin/login/?next={}'.format(url)) def test_default_group(self): # It exists self.assertTrue(Group.objects.filter(name='Default').count() == 1) # Verify that all new users are assigned to default group u = User.objects.create_user(username="default_user") u.refresh_from_db() self.assertTrue(u.groups.filter(name='Default').count() == 1) def test_projects(self): # Get a normal user user = User.objects.get(username="testuser") self.assertFalse(user.is_superuser) # Create a new project p = Project.objects.create(owner=user, name="test") # Have the proper permissions been set? self.assertTrue(user.has_perm("view_project", p)) self.assertTrue(user.has_perm("add_project", p)) self.assertTrue(user.has_perm("change_project", p)) self.assertTrue(user.has_perm("delete_project", p)) # Get a superuser superUser = User.objects.get(username="testsuperuser") self.assertTrue(superUser.is_superuser) # He should also have permissions, although not explicitly set self.assertTrue(superUser.has_perm("delete_project", p)) # Get another user anotherUser = User.objects.get(username="testuser2") self.assertFalse(anotherUser.is_superuser) # Should not have permission self.assertFalse(anotherUser.has_perm("delete_project", p)) def test_tasks(self): # Create a new task p = Project.objects.create(owner=User.objects.get(username="testuser"), name="test") task = Task.objects.create(project=p) # Test options validation task.options = [{'name': 'test', 'value': 1}] self.assertTrue(task.save() is None) task.options = {'test': 1} self.assertRaises(ValidationError, task.save) task.options = [{'name': 'test', 'value': 1}, {"invalid": 1}] self.assertRaises(ValidationError, task.save)
from jormungandr.interfaces.v1 import Uri from jormungandr.interfaces.v1 import Coverage from jormungandr.interfaces.v1 import Journeys from jormungandr.interfaces.v1 import Schedules from jormungandr.interfaces.v1 import Places from jormungandr.interfaces.v1 import Ptobjects from jormungandr.interfaces.v1 import Coord from jormungandr.interfaces.v1 import Disruptions from jormungandr.interfaces.v1 import Calendars from jormungandr.interfaces.v1 import converters_collection_type from jormungandr.interfaces.v1 import Status from werkzeug.routing import BaseConverter, FloatConverter, PathConverter from jormungandr.modules_loader import AModule from resources import Index class RegionConverter(BaseConverter): """ The region you want to query""" def __init__(self, *args, **kwargs): BaseConverter.__init__(self, *args, **kwargs) self.type_ = "string" self.regex = '[^(/;)]+' class LonConverter(FloatConverter): """ The longitude of where the coord you want to query""" def __init__(self, *args, **kwargs): FloatConverter.__init__(self, *args, **kwargs) self.type_ = "float" self.regex = '-?\\d+(\\.\\d+)?' class LatConverter(FloatConverter): """ The latitude of where the coord you want to query""" def __init__(self, *args, **kwargs): FloatConverter.__init__(self, *args, **kwargs) self.type_ = "float" self.regex = '-?\\d+(\\.\\d+)?' class UriConverter(PathConverter): """First part of the uri""" def __init__(self, *args, **kwargs): PathConverter.__init__(self, *args, **kwargs) self.type_ = "string" class IdConverter(BaseConverter): """Id of the object you want to query""" def __init__(self, *args, **kwargs): BaseConverter.__init__(self, *args, **kwargs) self.type_ = "string" class V1Routing(AModule): def __init__(self, api, name): super(V1Routing, self).__init__(api, name, description='Current version of navitia API', status='current', index_endpoint='index') def setup(self): self.api.app.url_map.converters['region'] = RegionConverter self.api.app.url_map.converters['lon'] = LonConverter self.api.app.url_map.converters['lat'] = LatConverter self.api.app.url_map.converters['uri'] = UriConverter self.api.app.url_map.converters['id'] = IdConverter self.api.app.url_map.strict_slashes = False self.module_resources_manager.register_resource(Index.Index()) self.add_resource(Index.Index, '/', '', endpoint='index') self.module_resources_manager.register_resource(Index.TechnicalStatus()) self.add_resource(Index.TechnicalStatus, '/status', endpoint='technical_status') coverage = '/coverage/' region = coverage + '<region:region>/' coord = coverage + '<lon:lon>;<lat:lat>/' self.add_resource(Coverage.Coverage, coverage, region, coord, endpoint='coverage') self.add_resource(Coord.Coord, '/coord/<lon:lon>;<lat:lat>', '/coords/<lon:lon>;<lat:lat>', endpoint='coord') collecs = converters_collection_type.collections_to_resource_type.keys() for collection in collecs: self.add_resource(getattr(Uri, collection)(True), region + collection, coord + collection, region + '<uri:uri>/' + collection, coord + '<uri:uri>/' + collection, endpoint=collection + '.collection') self.add_resource(getattr(Uri, collection)(False), region + collection + '/<id:id>', coord + collection + '/<id:id>', region + '<uri:uri>/' + collection + '/<id:id>', coord + '<uri:uri>/' + collection + '/<id:id>', endpoint=collection + '.id') collecs = ["routes", "lines", "line_groups", "networks", "stop_areas", "stop_points", "vehicle_journeys"] for collection in collecs: self.add_resource(getattr(Uri, collection)(True), '/' + collection, endpoint=collection + '.external_codes') self.add_resource(Places.Places, region + 'places', coord + 'places', '/places', endpoint='places') self.add_resource(Ptobjects.Ptobjects, region + 'pt_objects', coord + 'pt_objects', endpoint='pt_objects') self.add_resource(Places.PlaceUri, region + 'places/<id:id>', coord + 'places/<id:id>', endpoint='place_uri') self.add_resource(Places.PlacesNearby, region + 'places_nearby', coord + 'places_nearby', region + '<uri:uri>/places_nearby', coord + '<uri:uri>/places_nearby', endpoint='places_nearby') self.add_resource(Journeys.Journeys, region + '<uri:uri>/journeys', coord + '<uri:uri>/journeys', region + 'journeys', coord + 'journeys', '/journeys', endpoint='journeys') self.add_resource(Schedules.RouteSchedules, region + '<uri:uri>/route_schedules', coord + '<uri:uri>/route_schedules', '/route_schedules', endpoint='route_schedules') self.add_resource(Schedules.NextArrivals, region + '<uri:uri>/arrivals', coord + '<uri:uri>/arrivals', region + 'arrivals', coord + 'arrivals', endpoint='arrivals') self.add_resource(Schedules.NextDepartures, region + '<uri:uri>/departures', coord + '<uri:uri>/departures', region + 'departures', coord + 'departures', endpoint='departures') self.add_resource(Schedules.StopSchedules, region + '<uri:uri>/stop_schedules', coord + '<uri:uri>/stop_schedules', '/stop_schedules', endpoint='stop_schedules') self.add_resource(Disruptions.TrafficReport, region + 'traffic_reports', region + '<uri:uri>/traffic_reports', endpoint='traffic_reports') self.add_resource(Status.Status, region + 'status', endpoint='status') self.add_resource(Calendars.Calendars, region + 'calendars', region + '<uri:uri>/calendars', region + "calendars/<id:id>", endpoint="calendars")
from odoo import _, api, fields, models from odoo.exceptions import ValidationError class EbillPaymentContract(models.Model): _inherit = "ebill.payment.contract" paynet_account_number = fields.Char(string="Paynet ID", size=20) is_paynet_contract = fields.Boolean( compute="_compute_is_paynet_contract", store=False ) paynet_service_id = fields.Many2one( comodel_name="paynet.service", string="Paynet Service", ondelete="restrict" ) payment_type = fields.Selection( selection=[("qr", "QR"), ("isr", "ISR")], string="Payment method", default="qr", help="Payment type to use for the invoices sent," " PDF will be generated and attached accordingly.", ) @api.depends("transmit_method_id") def _compute_is_paynet_contract(self): transmit_method = self.env.ref("ebill_paynet.paynet_transmit_method") for record in self: record.is_paynet_contract = record.transmit_method_id == transmit_method @api.constrains("transmit_method_id", "paynet_account_number") def _check_paynet_account_number(self): for contract in self: if not contract.is_paynet_contract: continue if not contract.paynet_account_number: raise ValidationError( _("The Paynet ID is required for a Paynet contract.") ) @api.constrains("transmit_method_id", "paynet_service_id") def _check_paynet_service_id(self): for contract in self: if contract.is_paynet_contract and not contract.paynet_service_id: raise ValidationError( _("A Paynet service is required for a Paynet contract.") )
import pytest from django.conf import settings from shoop.core.pricing import get_pricing_module from shoop.core.pricing.default_pricing import DefaultPricingModule from shoop.testing.factories import ( create_product, create_random_person, get_default_customer_group, get_default_shop ) from shoop.testing.utils import apply_request_middleware original_pricing_module = settings.SHOOP_PRICING_MODULE def setup_module(module): settings.SHOOP_PRICING_MODULE = "default_pricing" def teardown_module(module): settings.SHOOP_PRICING_MODULE = original_pricing_module def get_shop_with_tax(include_tax): shop = get_default_shop() shop.prices_include_tax = include_tax shop.save() return shop def initialize_test(rf, include_tax=False): shop = get_shop_with_tax(include_tax=include_tax) group = get_default_customer_group() customer = create_random_person() customer.groups.add(group) customer.save() request = rf.get("/") request.shop = shop apply_request_middleware(request) request.customer = customer return request, shop, group def test_module_is_active(): # this test is because we want to make sure `SimplePricing` is active module = get_pricing_module() assert isinstance(module, DefaultPricingModule) @pytest.mark.django_db def test_default_price_none_allowed(rf): request, shop, group = initialize_test(rf, False) shop = get_default_shop() product = create_product("test-product", shop=shop, default_price=None) assert product.get_price(request) == shop.create_price(0) @pytest.mark.django_db def test_default_price_zero_allowed(rf): request, shop, group = initialize_test(rf, False) shop = get_default_shop() product = create_product("test-product", shop=shop, default_price=0) assert product.get_price(request) == shop.create_price(0) @pytest.mark.django_db def test_default_price_value_allowed(rf): request, shop, group = initialize_test(rf, False) shop = get_default_shop() product = create_product("test-product", shop=shop, default_price=100) assert product.get_price(request) == shop.create_price(100) @pytest.mark.django_db def test_non_one_quantity(rf): request, shop, group = initialize_test(rf, False) shop = get_default_shop() product = create_product("test-product", shop=shop, default_price=100) assert product.get_price(request, quantity=5) == shop.create_price(500)
import deform from pyramid.view import view_config from dace.processinstance.core import DEFAULTMAPPING_ACTIONS_VIEWS from pontus.default_behavior import Cancel from pontus.form import FormView from pontus.view import BasicView from pontus.view_operation import MultipleView from novaideo.content.processes.reports_management.behaviors import Restor from novaideo.core import SignalableEntity from novaideo import _ class RestorViewStudyRestor(BasicView): title = _('Alert for restoring') name = 'alertforpublication' template = 'novaideo:views/reports_management/templates/alert_restor.pt' def update(self): result = {} values = {'context': self.context} body = self.content(args=values, template=self.template)['body'] item = self.adapt_item(body, self.viewid) result['coordinates'] = {self.coordinates: [item]} return result class RestorFormView(FormView): title = _('Restore') behaviors = [Restor, Cancel] formid = 'formrestor' name = 'formrestor' def before_update(self): self.action = self.request.resource_url( self.context, 'novaideoapi', query={'op': 'update_action_view', 'node_id': Restor.node_definition.id}) self.schema.widget = deform.widget.FormWidget( css_class='deform novaideo-ajax-form') @view_config( name='restor', context=SignalableEntity, renderer='pontus:templates/views_templates/grid.pt', ) class RestorView(MultipleView): title = _('Restore') name = 'restor' behaviors = [Restor] viewid = 'restorentity' template = 'pontus:templates/views_templates/simple_multipleview.pt' views = (RestorViewStudyRestor, RestorFormView) validators = [Restor.get_validator()] DEFAULTMAPPING_ACTIONS_VIEWS.update( {Restor: RestorView})
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile from bika.lims.browser.bika_listing import BikaListingTable from bika.lims.browser.worksheet.views.analyses import AnalysesView class AnalysesTransposedView(AnalysesView): """ The view for displaying the table of manage_results transposed. Analysis Requests are displayed in columns and analyses in rows. Uses most of the logic provided by BikaListingView through bika.lims.worksheet.views.AnalysesView to generate the items, but renders its own template, which is highly specific for display analysis results. Because of this, some generic BikaListing functionalities, such as sorting, pagination, contextual menus for columns, etc. will not work in this view. """ def contents_table(self, table_only = True): """ Overrides contents_table method from the parent class BikaListingView, using the transposed template instead of the classic template. """ table = AnalysesTransposedTable(bika_listing = self, table_only = True) return table.render(self) class AnalysesTransposedTable(BikaListingTable): """ The BikaListingTable that uses a transposed template for displaying the results. """ render = ViewPageTemplateFile("../templates/analyses_transposed.pt") render_cell = ViewPageTemplateFile("../templates/analyses_transposed_cell.pt") def __init__(self, bika_listing = None, table_only = False): BikaListingTable.__init__(self, bika_listing, True) self.rows_headers = [] self.trans_items = {} self.positions = [] self._transpose_data() def _transpose_data(self): cached = [] index = 0 #ignore = ['Analysis', 'Service', 'Result', 'ResultDM'] include = ['Attachments', 'DetectionLimit', 'DueDate','Pos', 'ResultDM'] for col in self.bika_listing.review_state['columns']: if col == 'Result': # Further interims will be inserted in this position resindex = index if col not in include: continue lcol = self.bika_listing.columns[col] self.rows_headers.append({'id': col, 'title': lcol['title'], 'type': lcol.get('type',''), 'row_type': 'field', 'hidden': not lcol.get('toggle', True), 'input_class': lcol.get('input_class',''), 'input_width': lcol.get('input_width','')}) cached.append(col) index += 1 for item in self.items: if item['Service'] not in cached: self.rows_headers.insert(resindex, {'id': item['Service'], 'title': item['title'], 'type': item.get('type',''), 'row_type': 'analysis', 'index': index}) resindex += 1 cached.append(item['Service']) pos = item['Pos'] if pos in self.trans_items: self.trans_items[pos][item['Service']] = item else: self.trans_items[pos] = {item['Service']: item} if pos not in self.positions: self.positions.append(pos) def rendered_items(self, cat=None, **kwargs): return '' def render_row_cell(self, rowheader, position = ''): self.current_rowhead = rowheader self.current_position = position if rowheader['row_type'] == 'field': # Only the first item for this position contains common # data for all the analyses with the same position its = [i for i in self.items if i['Pos'] == position] self.current_item = its[0] if its else {} elif position in self.trans_items \ and rowheader['id'] in self.trans_items[position]: self.current_item = self.trans_items[position][rowheader['id']] else: return '' return self.render_cell()
from odoo import api, models, fields class ExceptionRule(models.Model): _inherit = 'exception.rule' rule_group = fields.Selection( selection_add=[('sale', 'Sale')], ) model = fields.Selection( selection_add=[ ('sale.order', 'Sale order'), ('sale.order.line', 'Sale order line'), ]) class SaleOrder(models.Model): _inherit = ['sale.order', 'base.exception'] _name = 'sale.order' _order = 'main_exception_id asc, date_order desc, name desc' rule_group = fields.Selection( selection_add=[('sale', 'Sale')], default='sale', ) @api.model def test_all_draft_orders(self): order_set = self.search([('state', '=', 'draft')]) order_set.test_exceptions() return True @api.constrains('ignore_exception', 'order_line', 'state') def sale_check_exception(self): orders = self.filtered(lambda s: s.state == 'sale') if orders: orders._check_exception() @api.onchange('order_line') def onchange_ignore_exception(self): if self.state == 'sale': self.ignore_exception = False @api.multi def action_confirm(self): if self.detect_exceptions(): return self._popup_exceptions() else: return super(SaleOrder, self).action_confirm() @api.multi def action_draft(self): res = super(SaleOrder, self).action_draft() orders = self.filtered(lambda s: s.ignore_exception) orders.write({ 'ignore_exception': False, }) return res def _sale_get_lines(self): self.ensure_one() return self.order_line @api.model def _get_popup_action(self): action = self.env.ref('sale_exception.action_sale_exception_confirm') return action
from . import stock_procurement_split
from django.contrib import admin from django.utils.translation import ugettext_lazy as _ from open_municipio.locations.models import Location class LocationAdmin(admin.ModelAdmin): list_display = ('name', 'count') admin.site.register(Location, LocationAdmin)
import logging import json from django.test.client import Client, RequestFactory from django.test.utils import override_settings from django.contrib.auth.models import User from django.core.management import call_command from django.core.urlresolvers import reverse from mock import patch, ANY, Mock from nose.tools import assert_true, assert_equal # pylint: disable=E0611 from opaque_keys.edx.locations import SlashSeparatedCourseKey from courseware.tests.modulestore_config import TEST_DATA_MIXED_MODULESTORE from django_comment_client.base import views from django_comment_client.tests.unicode import UnicodeTestMixin from django_comment_common.models import Role, FORUM_ROLE_STUDENT from django_comment_common.utils import seed_permissions_roles from student.tests.factories import CourseEnrollmentFactory, UserFactory from util.testing import UrlResetMixin from xmodule.modulestore.tests.factories import CourseFactory from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase log = logging.getLogger(__name__) CS_PREFIX = "http://localhost:4567/api/v1" class MockRequestSetupMixin(object): def _create_repsonse_mock(self, data): return Mock(text=json.dumps(data), json=Mock(return_value=data))\ def _set_mock_request_data(self, mock_request, data): mock_request.return_value = self._create_repsonse_mock(data) @override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE) @patch('lms.lib.comment_client.utils.requests.request') class ViewsTestCase(UrlResetMixin, ModuleStoreTestCase, MockRequestSetupMixin): @patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True}) def setUp(self): # Patching the ENABLE_DISCUSSION_SERVICE value affects the contents of urls.py, # so we need to call super.setUp() which reloads urls.py (because # of the UrlResetMixin) super(ViewsTestCase, self).setUp(create_user=False) # create a course self.course = CourseFactory.create(org='MITx', course='999', display_name='Robot Super Course') self.course_id = self.course.id # seed the forums permissions and roles call_command('seed_permissions_roles', self.course_id.to_deprecated_string()) # Patch the comment client user save method so it does not try # to create a new cc user when creating a django user with patch('student.models.cc.User.save'): uname = 'student' email = 'student@edx.org' password = 'test' # Create the user and make them active so we can log them in. self.student = User.objects.create_user(uname, email, password) self.student.is_active = True self.student.save() # Enroll the student in the course CourseEnrollmentFactory(user=self.student, course_id=self.course_id) self.client = Client() assert_true(self.client.login(username='student', password='test')) def test_create_thread(self, mock_request): mock_request.return_value.status_code = 200 self._set_mock_request_data(mock_request, { "thread_type": "discussion", "title": "Hello", "body": "this is a post", "course_id": "MITx/999/Robot_Super_Course", "anonymous": False, "anonymous_to_peers": False, "commentable_id": "i4x-MITx-999-course-Robot_Super_Course", "created_at": "2013-05-10T18:53:43Z", "updated_at": "2013-05-10T18:53:43Z", "at_position_list": [], "closed": False, "id": "518d4237b023791dca00000d", "user_id": "1", "username": "robot", "votes": { "count": 0, "up_count": 0, "down_count": 0, "point": 0 }, "abuse_flaggers": [], "type": "thread", "group_id": None, "pinned": False, "endorsed": False, "unread_comments_count": 0, "read": False, "comments_count": 0, }) thread = { "thread_type": "discussion", "body": ["this is a post"], "anonymous_to_peers": ["false"], "auto_subscribe": ["false"], "anonymous": ["false"], "title": ["Hello"], } url = reverse('create_thread', kwargs={'commentable_id': 'i4x-MITx-999-course-Robot_Super_Course', 'course_id': self.course_id.to_deprecated_string()}) response = self.client.post(url, data=thread) assert_true(mock_request.called) mock_request.assert_called_with( 'post', '{prefix}/i4x-MITx-999-course-Robot_Super_Course/threads'.format(prefix=CS_PREFIX), data={ 'thread_type': 'discussion', 'body': u'this is a post', 'anonymous_to_peers': False, 'user_id': 1, 'title': u'Hello', 'commentable_id': u'i4x-MITx-999-course-Robot_Super_Course', 'anonymous': False, 'course_id': u'MITx/999/Robot_Super_Course', }, params={'request_id': ANY}, headers=ANY, timeout=5 ) assert_equal(response.status_code, 200) def test_delete_comment(self, mock_request): self._set_mock_request_data(mock_request, { "user_id": str(self.student.id), "closed": False, }) test_comment_id = "test_comment_id" request = RequestFactory().post("dummy_url", {"id": test_comment_id}) request.user = self.student request.view_name = "delete_comment" response = views.delete_comment(request, course_id=self.course.id.to_deprecated_string(), comment_id=test_comment_id) self.assertEqual(response.status_code, 200) self.assertTrue(mock_request.called) args = mock_request.call_args[0] self.assertEqual(args[0], "delete") self.assertTrue(args[1].endswith("/{}".format(test_comment_id))) def _setup_mock_request(self, mock_request, include_depth=False): """ Ensure that mock_request returns the data necessary to make views function correctly """ mock_request.return_value.status_code = 200 data = { "user_id": str(self.student.id), "closed": False, } if include_depth: data["depth"] = 0 self._set_mock_request_data(mock_request, data) def _test_request_error(self, view_name, view_kwargs, data, mock_request): """ Submit a request against the given view with the given data and ensure that the result is a 400 error and that no data was posted using mock_request """ self._setup_mock_request(mock_request, include_depth=(view_name == "create_sub_comment")) response = self.client.post(reverse(view_name, kwargs=view_kwargs), data=data) self.assertEqual(response.status_code, 400) for call in mock_request.call_args_list: self.assertEqual(call[0][0].lower(), "get") def test_create_thread_no_title(self, mock_request): self._test_request_error( "create_thread", {"commentable_id": "dummy", "course_id": self.course_id.to_deprecated_string()}, {"body": "foo"}, mock_request ) def test_create_thread_empty_title(self, mock_request): self._test_request_error( "create_thread", {"commentable_id": "dummy", "course_id": self.course_id.to_deprecated_string()}, {"body": "foo", "title": " "}, mock_request ) def test_create_thread_no_body(self, mock_request): self._test_request_error( "create_thread", {"commentable_id": "dummy", "course_id": self.course_id.to_deprecated_string()}, {"title": "foo"}, mock_request ) def test_create_thread_empty_body(self, mock_request): self._test_request_error( "create_thread", {"commentable_id": "dummy", "course_id": self.course_id.to_deprecated_string()}, {"body": " ", "title": "foo"}, mock_request ) def test_update_thread_no_title(self, mock_request): self._test_request_error( "update_thread", {"thread_id": "dummy", "course_id": self.course_id.to_deprecated_string()}, {"body": "foo"}, mock_request ) def test_update_thread_empty_title(self, mock_request): self._test_request_error( "update_thread", {"thread_id": "dummy", "course_id": self.course_id.to_deprecated_string()}, {"body": "foo", "title": " "}, mock_request ) def test_update_thread_no_body(self, mock_request): self._test_request_error( "update_thread", {"thread_id": "dummy", "course_id": self.course_id.to_deprecated_string()}, {"title": "foo"}, mock_request ) def test_update_thread_empty_body(self, mock_request): self._test_request_error( "update_thread", {"thread_id": "dummy", "course_id": self.course_id.to_deprecated_string()}, {"body": " ", "title": "foo"}, mock_request ) def test_create_comment_no_body(self, mock_request): self._test_request_error( "create_comment", {"thread_id": "dummy", "course_id": self.course_id.to_deprecated_string()}, {}, mock_request ) def test_create_comment_empty_body(self, mock_request): self._test_request_error( "create_comment", {"thread_id": "dummy", "course_id": self.course_id.to_deprecated_string()}, {"body": " "}, mock_request ) def test_create_sub_comment_no_body(self, mock_request): self._test_request_error( "create_sub_comment", {"comment_id": "dummy", "course_id": self.course_id.to_deprecated_string()}, {}, mock_request ) def test_create_sub_comment_empty_body(self, mock_request): self._test_request_error( "create_sub_comment", {"comment_id": "dummy", "course_id": self.course_id.to_deprecated_string()}, {"body": " "}, mock_request ) def test_update_comment_no_body(self, mock_request): self._test_request_error( "update_comment", {"comment_id": "dummy", "course_id": self.course_id.to_deprecated_string()}, {}, mock_request ) def test_update_comment_empty_body(self, mock_request): self._test_request_error( "update_comment", {"comment_id": "dummy", "course_id": self.course_id.to_deprecated_string()}, {"body": " "}, mock_request ) def test_update_comment_basic(self, mock_request): self._setup_mock_request(mock_request) comment_id = "test_comment_id" updated_body = "updated body" response = self.client.post( reverse( "update_comment", kwargs={"course_id": self.course_id.to_deprecated_string(), "comment_id": comment_id} ), data={"body": updated_body} ) self.assertEqual(response.status_code, 200) mock_request.assert_called_with( "put", "{prefix}/comments/{comment_id}".format(prefix=CS_PREFIX, comment_id=comment_id), headers=ANY, params=ANY, timeout=ANY, data={"body": updated_body} ) def test_flag_thread(self, mock_request): mock_request.return_value.status_code = 200 self._set_mock_request_data(mock_request, { "title": "Hello", "body": "this is a post", "course_id": "MITx/999/Robot_Super_Course", "anonymous": False, "anonymous_to_peers": False, "commentable_id": "i4x-MITx-999-course-Robot_Super_Course", "created_at": "2013-05-10T18:53:43Z", "updated_at": "2013-05-10T18:53:43Z", "at_position_list": [], "closed": False, "id": "518d4237b023791dca00000d", "user_id": "1","username": "robot", "votes": { "count": 0, "up_count": 0, "down_count": 0, "point": 0 }, "abuse_flaggers": [1], "type": "thread", "group_id": None, "pinned": False, "endorsed": False, "unread_comments_count": 0, "read": False, "comments_count": 0, }) url = reverse('flag_abuse_for_thread', kwargs={'thread_id': '518d4237b023791dca00000d', 'course_id': self.course_id.to_deprecated_string()}) response = self.client.post(url) assert_true(mock_request.called) call_list = [ ( ('get', '{prefix}/threads/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)), { 'data': None, 'params': {'mark_as_read': True, 'request_id': ANY}, 'headers': ANY, 'timeout': 5 } ), ( ('put', '{prefix}/threads/518d4237b023791dca00000d/abuse_flag'.format(prefix=CS_PREFIX)), { 'data': {'user_id': '1'}, 'params': {'request_id': ANY}, 'headers': ANY, 'timeout': 5 } ), ( ('get', '{prefix}/threads/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)), { 'data': None, 'params': {'mark_as_read': True, 'request_id': ANY}, 'headers': ANY, 'timeout': 5 } ) ] assert_equal(call_list, mock_request.call_args_list) assert_equal(response.status_code, 200) def test_un_flag_thread(self, mock_request): mock_request.return_value.status_code = 200 self._set_mock_request_data(mock_request, { "title": "Hello", "body": "this is a post", "course_id": "MITx/999/Robot_Super_Course", "anonymous": False, "anonymous_to_peers": False, "commentable_id": "i4x-MITx-999-course-Robot_Super_Course", "created_at": "2013-05-10T18:53:43Z", "updated_at": "2013-05-10T18:53:43Z", "at_position_list": [], "closed": False, "id": "518d4237b023791dca00000d", "user_id": "1", "username": "robot", "votes": { "count": 0, "up_count": 0, "down_count": 0, "point": 0 }, "abuse_flaggers": [], "type": "thread", "group_id": None, "pinned": False, "endorsed": False, "unread_comments_count": 0, "read": False, "comments_count": 0 }) url = reverse('un_flag_abuse_for_thread', kwargs={'thread_id': '518d4237b023791dca00000d', 'course_id': self.course_id.to_deprecated_string()}) response = self.client.post(url) assert_true(mock_request.called) call_list = [ ( ('get', '{prefix}/threads/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)), { 'data': None, 'params': {'mark_as_read': True, 'request_id': ANY}, 'headers': ANY, 'timeout': 5 } ), ( ('put', '{prefix}/threads/518d4237b023791dca00000d/abuse_unflag'.format(prefix=CS_PREFIX)), { 'data': {'user_id': '1'}, 'params': {'request_id': ANY}, 'headers': ANY, 'timeout': 5 } ), ( ('get', '{prefix}/threads/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)), { 'data': None, 'params': {'mark_as_read': True, 'request_id': ANY}, 'headers': ANY, 'timeout': 5 } ) ] assert_equal(call_list, mock_request.call_args_list) assert_equal(response.status_code, 200) def test_flag_comment(self, mock_request): mock_request.return_value.status_code = 200 self._set_mock_request_data(mock_request, { "body": "this is a comment", "course_id": "MITx/999/Robot_Super_Course", "anonymous": False, "anonymous_to_peers": False, "commentable_id": "i4x-MITx-999-course-Robot_Super_Course", "created_at": "2013-05-10T18:53:43Z", "updated_at": "2013-05-10T18:53:43Z", "at_position_list": [], "closed": False, "id": "518d4237b023791dca00000d", "user_id": "1", "username": "robot", "votes": { "count": 0, "up_count": 0, "down_count": 0, "point": 0 }, "abuse_flaggers": [1], "type": "comment", "endorsed": False }) url = reverse('flag_abuse_for_comment', kwargs={'comment_id': '518d4237b023791dca00000d', 'course_id': self.course_id.to_deprecated_string()}) response = self.client.post(url) assert_true(mock_request.called) call_list = [ ( ('get', '{prefix}/comments/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)), { 'data': None, 'params': {'request_id': ANY}, 'headers': ANY, 'timeout': 5 } ), ( ('put', '{prefix}/comments/518d4237b023791dca00000d/abuse_flag'.format(prefix=CS_PREFIX)), { 'data': {'user_id': '1'}, 'params': {'request_id': ANY}, 'headers': ANY, 'timeout': 5 } ), ( ('get', '{prefix}/comments/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)), { 'data': None, 'params': {'request_id': ANY}, 'headers': ANY, 'timeout': 5 } ) ] assert_equal(call_list, mock_request.call_args_list) assert_equal(response.status_code, 200) def test_un_flag_comment(self, mock_request): mock_request.return_value.status_code = 200 self._set_mock_request_data(mock_request, { "body": "this is a comment", "course_id": "MITx/999/Robot_Super_Course", "anonymous": False, "anonymous_to_peers": False, "commentable_id": "i4x-MITx-999-course-Robot_Super_Course", "created_at": "2013-05-10T18:53:43Z", "updated_at": "2013-05-10T18:53:43Z", "at_position_list": [], "closed": False, "id": "518d4237b023791dca00000d", "user_id": "1", "username": "robot", "votes": { "count": 0, "up_count": 0, "down_count": 0, "point": 0 }, "abuse_flaggers": [], "type": "comment", "endorsed": False }) url = reverse('un_flag_abuse_for_comment', kwargs={'comment_id': '518d4237b023791dca00000d', 'course_id': self.course_id.to_deprecated_string()}) response = self.client.post(url) assert_true(mock_request.called) call_list = [ ( ('get', '{prefix}/comments/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)), { 'data': None, 'params': {'request_id': ANY}, 'headers': ANY, 'timeout': 5 } ), ( ('put', '{prefix}/comments/518d4237b023791dca00000d/abuse_unflag'.format(prefix=CS_PREFIX)), { 'data': {'user_id': '1'}, 'params': {'request_id': ANY}, 'headers': ANY, 'timeout': 5 } ), ( ('get', '{prefix}/comments/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)), { 'data': None, 'params': {'request_id': ANY}, 'headers': ANY, 'timeout': 5 } ) ] assert_equal(call_list, mock_request.call_args_list) assert_equal(response.status_code, 200) @patch("lms.lib.comment_client.utils.requests.request") @override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE) class ViewPermissionsTestCase(UrlResetMixin, ModuleStoreTestCase, MockRequestSetupMixin): @patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True}) def setUp(self): super(ViewPermissionsTestCase, self).setUp() self.password = "test password" self.course = CourseFactory.create() seed_permissions_roles(self.course.id) self.student = UserFactory.create(password=self.password) self.moderator = UserFactory.create(password=self.password) CourseEnrollmentFactory(user=self.student, course_id=self.course.id) CourseEnrollmentFactory(user=self.moderator, course_id=self.course.id) self.moderator.roles.add(Role.objects.get(name="Moderator", course_id=self.course.id)) def test_pin_thread_as_student(self, mock_request): self._set_mock_request_data(mock_request, {}) self.client.login(username=self.student.username, password=self.password) response = self.client.post( reverse("pin_thread", kwargs={"course_id": self.course.id.to_deprecated_string(), "thread_id": "dummy"}) ) self.assertEqual(response.status_code, 401) def test_pin_thread_as_moderator(self, mock_request): self._set_mock_request_data(mock_request, {}) self.client.login(username=self.moderator.username, password=self.password) response = self.client.post( reverse("pin_thread", kwargs={"course_id": self.course.id.to_deprecated_string(), "thread_id": "dummy"}) ) self.assertEqual(response.status_code, 200) def test_un_pin_thread_as_student(self, mock_request): self._set_mock_request_data(mock_request, {}) self.client.login(username=self.student.username, password=self.password) response = self.client.post( reverse("un_pin_thread", kwargs={"course_id": self.course.id.to_deprecated_string(), "thread_id": "dummy"}) ) self.assertEqual(response.status_code, 401) def test_un_pin_thread_as_moderator(self, mock_request): self._set_mock_request_data(mock_request, {}) self.client.login(username=self.moderator.username, password=self.password) response = self.client.post( reverse("un_pin_thread", kwargs={"course_id": self.course.id.to_deprecated_string(), "thread_id": "dummy"}) ) self.assertEqual(response.status_code, 200) def _set_mock_request_thread_and_comment(self, mock_request, thread_data, comment_data): def handle_request(*args, **kwargs): url = args[1] if "/threads/" in url: return self._create_repsonse_mock(thread_data) elif "/comments/" in url: return self._create_repsonse_mock(comment_data) else: raise ArgumentError("Bad url to mock request") mock_request.side_effect = handle_request def test_endorse_response_as_staff(self, mock_request): self._set_mock_request_thread_and_comment( mock_request, {"type": "thread", "thread_type": "question", "user_id": str(self.student.id)}, {"type": "comment", "thread_id": "dummy"} ) self.client.login(username=self.moderator.username, password=self.password) response = self.client.post( reverse("endorse_comment", kwargs={"course_id": self.course.id.to_deprecated_string(), "comment_id": "dummy"}) ) self.assertEqual(response.status_code, 200) def test_endorse_response_as_student(self, mock_request): self._set_mock_request_thread_and_comment( mock_request, {"type": "thread", "thread_type": "question", "user_id": str(self.moderator.id)}, {"type": "comment", "thread_id": "dummy"} ) self.client.login(username=self.student.username, password=self.password) response = self.client.post( reverse("endorse_comment", kwargs={"course_id": self.course.id.to_deprecated_string(), "comment_id": "dummy"}) ) self.assertEqual(response.status_code, 401) def test_endorse_response_as_student_question_author(self, mock_request): self._set_mock_request_thread_and_comment( mock_request, {"type": "thread", "thread_type": "question", "user_id": str(self.student.id)}, {"type": "comment", "thread_id": "dummy"} ) self.client.login(username=self.student.username, password=self.password) response = self.client.post( reverse("endorse_comment", kwargs={"course_id": self.course.id.to_deprecated_string(), "comment_id": "dummy"}) ) self.assertEqual(response.status_code, 200) @override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE) class CreateThreadUnicodeTestCase(ModuleStoreTestCase, UnicodeTestMixin, MockRequestSetupMixin): def setUp(self): self.course = CourseFactory.create() seed_permissions_roles(self.course.id) self.student = UserFactory.create() CourseEnrollmentFactory(user=self.student, course_id=self.course.id) @patch('lms.lib.comment_client.utils.requests.request') def _test_unicode_data(self, text, mock_request): self._set_mock_request_data(mock_request, {}) request = RequestFactory().post("dummy_url", {"thread_type": "discussion", "body": text, "title": text}) request.user = self.student request.view_name = "create_thread" response = views.create_thread(request, course_id=self.course.id.to_deprecated_string(), commentable_id="test_commentable") self.assertEqual(response.status_code, 200) self.assertTrue(mock_request.called) self.assertEqual(mock_request.call_args[1]["data"]["body"], text) self.assertEqual(mock_request.call_args[1]["data"]["title"], text) @override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE) class UpdateThreadUnicodeTestCase(ModuleStoreTestCase, UnicodeTestMixin, MockRequestSetupMixin): def setUp(self): self.course = CourseFactory.create() seed_permissions_roles(self.course.id) self.student = UserFactory.create() CourseEnrollmentFactory(user=self.student, course_id=self.course.id) @patch('lms.lib.comment_client.utils.requests.request') def _test_unicode_data(self, text, mock_request): self._set_mock_request_data(mock_request, { "user_id": str(self.student.id), "closed": False, }) request = RequestFactory().post("dummy_url", {"body": text, "title": text}) request.user = self.student request.view_name = "update_thread" response = views.update_thread(request, course_id=self.course.id.to_deprecated_string(), thread_id="dummy_thread_id") self.assertEqual(response.status_code, 200) self.assertTrue(mock_request.called) self.assertEqual(mock_request.call_args[1]["data"]["body"], text) self.assertEqual(mock_request.call_args[1]["data"]["title"], text) @override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE) class CreateCommentUnicodeTestCase(ModuleStoreTestCase, UnicodeTestMixin, MockRequestSetupMixin): def setUp(self): self.course = CourseFactory.create() seed_permissions_roles(self.course.id) self.student = UserFactory.create() CourseEnrollmentFactory(user=self.student, course_id=self.course.id) @patch('lms.lib.comment_client.utils.requests.request') def _test_unicode_data(self, text, mock_request): self._set_mock_request_data(mock_request, { "closed": False, }) request = RequestFactory().post("dummy_url", {"body": text}) request.user = self.student request.view_name = "create_comment" response = views.create_comment(request, course_id=self.course.id.to_deprecated_string(), thread_id="dummy_thread_id") self.assertEqual(response.status_code, 200) self.assertTrue(mock_request.called) self.assertEqual(mock_request.call_args[1]["data"]["body"], text) @override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE) class UpdateCommentUnicodeTestCase(ModuleStoreTestCase, UnicodeTestMixin, MockRequestSetupMixin): def setUp(self): self.course = CourseFactory.create() seed_permissions_roles(self.course.id) self.student = UserFactory.create() CourseEnrollmentFactory(user=self.student, course_id=self.course.id) @patch('lms.lib.comment_client.utils.requests.request') def _test_unicode_data(self, text, mock_request): self._set_mock_request_data(mock_request, { "user_id": str(self.student.id), "closed": False, }) request = RequestFactory().post("dummy_url", {"body": text}) request.user = self.student request.view_name = "update_comment" response = views.update_comment(request, course_id=self.course.id.to_deprecated_string(), comment_id="dummy_comment_id") self.assertEqual(response.status_code, 200) self.assertTrue(mock_request.called) self.assertEqual(mock_request.call_args[1]["data"]["body"], text) @override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE) class CreateSubCommentUnicodeTestCase(ModuleStoreTestCase, UnicodeTestMixin, MockRequestSetupMixin): def setUp(self): self.course = CourseFactory.create() seed_permissions_roles(self.course.id) self.student = UserFactory.create() CourseEnrollmentFactory(user=self.student, course_id=self.course.id) @patch('lms.lib.comment_client.utils.requests.request') def _test_unicode_data(self, text, mock_request): self._set_mock_request_data(mock_request, { "closed": False, "depth": 1, }) request = RequestFactory().post("dummy_url", {"body": text}) request.user = self.student request.view_name = "create_sub_comment" response = views.create_sub_comment(request, course_id=self.course.id.to_deprecated_string(), comment_id="dummy_comment_id") self.assertEqual(response.status_code, 200) self.assertTrue(mock_request.called) self.assertEqual(mock_request.call_args[1]["data"]["body"], text) @override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE) class UsersEndpointTestCase(ModuleStoreTestCase, MockRequestSetupMixin): def set_post_counts(self, mock_request, threads_count=1, comments_count=1): """ sets up a mock response from the comments service for getting post counts for our other_user """ self._set_mock_request_data(mock_request, { "threads_count": threads_count, "comments_count": comments_count, }) def setUp(self): self.course = CourseFactory.create() seed_permissions_roles(self.course.id) self.student = UserFactory.create() self.enrollment = CourseEnrollmentFactory(user=self.student, course_id=self.course.id) self.other_user = UserFactory.create(username="other") CourseEnrollmentFactory(user=self.other_user, course_id=self.course.id) def make_request(self, method='get', course_id=None, **kwargs): course_id = course_id or self.course.id request = getattr(RequestFactory(), method)("dummy_url", kwargs) request.user = self.student request.view_name = "users" return views.users(request, course_id=course_id.to_deprecated_string()) @patch('lms.lib.comment_client.utils.requests.request') def test_finds_exact_match(self, mock_request): self.set_post_counts(mock_request) response = self.make_request(username="other") self.assertEqual(response.status_code, 200) self.assertEqual( json.loads(response.content)["users"], [{"id": self.other_user.id, "username": self.other_user.username}] ) @patch('lms.lib.comment_client.utils.requests.request') def test_finds_no_match(self, mock_request): self.set_post_counts(mock_request) response = self.make_request(username="othor") self.assertEqual(response.status_code, 200) self.assertEqual(json.loads(response.content)["users"], []) def test_requires_GET(self): response = self.make_request(method='post', username="other") self.assertEqual(response.status_code, 405) def test_requires_username_param(self): response = self.make_request() self.assertEqual(response.status_code, 400) content = json.loads(response.content) self.assertIn("errors", content) self.assertNotIn("users", content) def test_course_does_not_exist(self): course_id = SlashSeparatedCourseKey.from_deprecated_string("does/not/exist") response = self.make_request(course_id=course_id, username="other") self.assertEqual(response.status_code, 404) content = json.loads(response.content) self.assertIn("errors", content) self.assertNotIn("users", content) def test_requires_requestor_enrolled_in_course(self): # unenroll self.student from the course. self.enrollment.delete() response = self.make_request(username="other") self.assertEqual(response.status_code, 404) content = json.loads(response.content) self.assertTrue(content.has_key("errors")) self.assertFalse(content.has_key("users")) @patch('lms.lib.comment_client.utils.requests.request') def test_requires_matched_user_has_forum_content(self, mock_request): self.set_post_counts(mock_request, 0, 0) response = self.make_request(username="other") self.assertEqual(response.status_code, 200) self.assertEqual(json.loads(response.content)["users"], [])
""" Key-value store that holds XBlock field data read out of Blockstore """ from collections import namedtuple from weakref import WeakKeyDictionary import logging from xblock.exceptions import InvalidScopeError, NoSuchDefinition from xblock.fields import Field, BlockScope, Scope, UserScope, Sentinel from xblock.field_data import FieldData from openedx.core.djangoapps.xblock.learning_context.manager import get_learning_context_impl from openedx.core.djangolib.blockstore_cache import ( get_bundle_version_files_cached, get_bundle_draft_files_cached, ) log = logging.getLogger(__name__) ActiveBlock = namedtuple('ActiveBlock', ['olx_hash', 'changed_fields']) DELETED = Sentinel('DELETED') # Special value indicating a field was reset to its default value CHILDREN_INCLUDES = Sentinel('CHILDREN_INCLUDES') # Key for a pseudo-field that stores the XBlock's children info MAX_DEFINITIONS_LOADED = 100 # How many of the most recently used XBlocks' field data to keep in memory at max. class BlockInstanceUniqueKey(object): """ An empty object used as a unique key for each XBlock instance, see get_weak_key_for_block() and BlockstoreFieldData._get_active_block(). Every XBlock instance will get a unique one of these keys, even if they are otherwise identical. Its purpose is similar to `id(block)`. """ def get_weak_key_for_block(block): """ Given an XBlock instance, return an object with the same lifetime as the block, suitable as a key to hold block-specific data in a WeakKeyDictionary. """ # We would like to make the XBlock instance 'block' itself the key of # BlockstoreFieldData.active_blocks, so that we have exactly one entry per # XBlock instance in memory, and they'll each be automatically freed by the # WeakKeyDictionary as needed. But because XModules implement # __eq__() in a way that reads all field values, just attempting to use # the block as a dict key here will trigger infinite recursion. So # instead we key the dict on an arbitrary object, # block key = BlockInstanceUniqueKey() which we create here. That way # the weak reference will still cause the entry in the WeakKeyDictionary to # be freed automatically when the block is no longer needed, and we # still get one entry per XBlock instance. if not hasattr(block, '_field_data_key_obj'): block._field_data_key_obj = BlockInstanceUniqueKey() # pylint: disable=protected-access return block._field_data_key_obj # pylint: disable=protected-access def get_olx_hash_for_definition_key(def_key): """ Given a BundleDefinitionLocator, which identifies a specific version of an OLX file, return the hash of the OLX file as given by the Blockstore API. """ if def_key.bundle_version: # This is referring to an immutable file (BundleVersions are immutable so this can be aggressively cached) files_list = get_bundle_version_files_cached(def_key.bundle_uuid, def_key.bundle_version) else: # This is referring to a draft OLX file which may be recently updated: files_list = get_bundle_draft_files_cached(def_key.bundle_uuid, def_key.draft_name) for entry in files_list: if entry.path == def_key.olx_path: return entry.hash_digest raise NoSuchDefinition("Could not load OLX file for key {}".format(def_key)) class BlockstoreFieldData(FieldData): """ An XBlock FieldData implementation that reads XBlock field data directly out of Blockstore. It requires that every XBlock have a BundleDefinitionLocator as its "definition key", since the BundleDefinitionLocator is what specifies the OLX file path and version to use. Within Blockstore there is no mechanism for setting different field values at the usage level compared to the definition level, so we treat usage-scoped fields identically to definition-scoped fields. """ def __init__(self): """ Initialize this BlockstoreFieldData instance. """ # loaded definitions: a dict where the key is the hash of the XBlock's # olx file (as stated by the Blockstore API), and the values is the # dict of field data as loaded from that OLX file. The field data dicts # in this should be considered immutable, and never modified. self.loaded_definitions = {} # Active blocks: this holds the field data *changes* for all the XBlocks # that are currently in memory being used for something. We only keep a # weak reference so that the memory will be freed when the XBlock is no # longer needed (e.g. at the end of a request) # The key of this dictionary is on ID object owned by the XBlock itself # (see _get_active_block()) and the value is an ActiveBlock object # (which holds olx_hash and changed_fields) self.active_blocks = WeakKeyDictionary() super(BlockstoreFieldData, self).__init__() # lint-amnesty, pylint: disable=super-with-arguments def _getfield(self, block, name): """ Return the field with the given `name` from `block`. If the XBlock doesn't have such a field, raises a KeyError. """ # First, get the field from the class, if defined block_field = getattr(block.__class__, name, None) if block_field is not None and isinstance(block_field, Field): return block_field # Not in the class, so name really doesn't name a field raise KeyError(name) def _check_field(self, block, name): """ Given a block and the name of one of its fields, check that we will be able to read/write it. """ if name == CHILDREN_INCLUDES: return # This is a pseudo-field used in conjunction with BlockstoreChildrenData field = self._getfield(block, name) if field.scope in (Scope.children, Scope.parent): # lint-amnesty, pylint: disable=no-else-raise # This field data store is focused on definition-level field data, and children/parent is mostly # relevant at the usage level. Scope.parent doesn't even seem to be used? raise NotImplementedError("Setting Scope.children/parent is not supported by BlockstoreFieldData.") else: if field.scope.user != UserScope.NONE: raise InvalidScopeError("BlockstoreFieldData only supports UserScope.NONE fields") if field.scope.block not in (BlockScope.DEFINITION, BlockScope.USAGE): raise InvalidScopeError( "BlockstoreFieldData does not support BlockScope.{} fields".format(field.scope.block) ) # There is also BlockScope.TYPE but we don't need to support that; # it's mostly relevant as Scope.preferences(UserScope.ONE, BlockScope.TYPE) # Which would be handled by a user-aware FieldData implementation def _get_active_block(self, block): """ Get the ActiveBlock entry for the specified block, creating it if necessary. """ key = get_weak_key_for_block(block) if key not in self.active_blocks: self.active_blocks[key] = ActiveBlock( olx_hash=get_olx_hash_for_definition_key(block.scope_ids.def_id), changed_fields={}, ) return self.active_blocks[key] def get(self, block, name): """ Get the given field value from Blockstore If the XBlock has been making changes to its fields, the value will be in self._get_active_block(block).changed_fields[name] Otherwise, the value comes from self.loaded_definitions which is a dict of OLX file field data, keyed by the hash of the OLX file. """ self._check_field(block, name) entry = self._get_active_block(block) if name in entry.changed_fields: value = entry.changed_fields[name] if value == DELETED: raise KeyError # KeyError means use the default value, since this field was deliberately set to default return value try: saved_fields = self.loaded_definitions[entry.olx_hash] except KeyError: if name == CHILDREN_INCLUDES: # Special case: parse_xml calls add_node_as_child which calls 'block.children.append()' # BEFORE parse_xml is done, and .append() needs to read the value of children. So return [] # start with an empty list, it will get filled in. # Otherwise, this is an anomalous get() before the XML was fully loaded: # This could happen if an XBlock's parse_xml() method tried to read a field before setting it, # if an XBlock read field data in its constructor (forbidden), or if an XBlock was loaded via # some means other than runtime.get_block(). One way this can happen is if you log/print an XBlock during # XML parsing, because ScopedStorageMixin.__repr__ will try to print all field values, and any fields which # aren't mentioned in the XML (which are left at their default) will be "not loaded yet." log.exception( "XBlock %s tried to read from field data (%s) that wasn't loaded from Blockstore!", block.scope_ids.usage_id, name, ) raise # Just use the default value for now; any exception raised here is caught anyways return saved_fields[name] # If 'name' is not found, this will raise KeyError, which means to use the default value def set(self, block, name, value): """ Set the value of the field named `name` """ entry = self._get_active_block(block) entry.changed_fields[name] = value def delete(self, block, name): """ Reset the value of the field named `name` to the default """ self.set(block, name, DELETED) def default(self, block, name): """ Get the default value for block's field 'name'. The XBlock class will provide the default if KeyError is raised; this is mostly for the purpose of context-specific overrides. """ raise KeyError(name) def cache_fields(self, block): """ Cache field data: This is called by the runtime after a block has parsed its OLX via its parse_xml() methods and written all of its field values into this field data store. The values will be stored in self._get_active_block(block).changed_fields so we know at this point that that isn't really "changed" field data, it's the result of parsing the OLX. Save a copy into loaded_definitions. """ entry = self._get_active_block(block) self.loaded_definitions[entry.olx_hash] = entry.changed_fields.copy() # Reset changed_fields to indicate this block hasn't actually made any field data changes, just loaded from XML: entry.changed_fields.clear() if len(self.loaded_definitions) > MAX_DEFINITIONS_LOADED: self.free_unused_definitions() def has_changes(self, block): """ Does the specified block have any unsaved changes? """ entry = self._get_active_block(block) return bool(entry.changed_fields) def has_cached_definition(self, definition_key): """ Has the specified OLX file been loaded into memory? """ olx_hash = get_olx_hash_for_definition_key(definition_key) return olx_hash in self.loaded_definitions def free_unused_definitions(self): """ Free unused field data cache entries from self.loaded_definitions as long as they're not in use. """ olx_hashes = set(self.loaded_definitions.keys()) olx_hashes_needed = set(entry.olx_hash for entry in self.active_blocks.values()) olx_hashes_safe_to_delete = olx_hashes - olx_hashes_needed # To avoid doing this too often, randomly cull unused entries until # we have only half as many as MAX_DEFINITIONS_LOADED in memory, if possible. while olx_hashes_safe_to_delete and (len(self.loaded_definitions) > MAX_DEFINITIONS_LOADED / 2): del self.loaded_definitions[olx_hashes_safe_to_delete.pop()] class BlockstoreChildrenData(FieldData): """ An XBlock FieldData implementation that reads 'children' data out of the definition fields in BlockstoreFieldData. The children field contains usage keys and so is usage-specific; the BlockstoreFieldData can only store field data that is not usage-specific. So we store data about the <xblock-include /> elements that define the children in BlockstoreFieldData (since that is not usage-specific), and this field data implementation loads that <xblock-include /> data and transforms it into the usage keys that comprise the standard .children field. """ def __init__(self, blockstore_field_data): """ Initialize this BlockstoreChildrenData instance. """ # The data store that holds Scope.usage and Scope.definition data: self.authored_data_store = blockstore_field_data super(BlockstoreChildrenData, self).__init__() # lint-amnesty, pylint: disable=super-with-arguments def _check_field(self, block, name): # pylint: disable=unused-argument """ Given a block and the name of one of its fields, check that we will be able to read/write it. """ if name != 'children': raise InvalidScopeError("BlockstoreChildrenData can only read/write from a field named 'children'") def get(self, block, name): """ Get the "children' field value. We do this by reading the parsed <xblock-include /> values from the regular authored data store and then transforming them to usage IDs. """ self._check_field(block, name) children_includes = self.get_includes(block) if not children_includes: return [] # Now the .children field is required to be a list of usage IDs: learning_context = get_learning_context_impl(block.scope_ids.usage_id) child_usages = [] for parsed_include in children_includes: child_usages.append( learning_context.usage_for_child_include( block.scope_ids.usage_id, block.scope_ids.def_id, parsed_include, ) ) return child_usages def set(self, block, name, value): """ Set the value of the field; requires name='children' """ self._check_field(block, name) children_includes = self.authored_data_store.get(block, CHILDREN_INCLUDES) if len(value) != len(children_includes): raise RuntimeError( "This runtime does not allow changing .children directly - use runtime.add_child_include instead." ) # This is a no-op; the value of 'children' is derived from CHILDREN_INCLUDES # so we never write to the children field directly. All we do is make sure it # looks like it's still in sync with CHILDREN_INCLUDES def get_includes(self, block): """ Get the list of <xblock-include /> elements representing this XBlock's children. """ try: return self.authored_data_store.get(block, CHILDREN_INCLUDES) except KeyError: # KeyError raised by an XBlock field data store means "use the # default value", and the default value for the children field is an # empty list. return [] def append_include(self, block, parsed_include): """ Append an <xblock-include /> element to this XBlock's list of children """ self.authored_data_store.set(block, CHILDREN_INCLUDES, self.get_includes(block) + [parsed_include]) def delete(self, block, name): """ Reset the value of the field named `name` to the default """ self._check_field(block, name) self.authored_data_store.set(block, CHILDREN_INCLUDES, []) self.set(block, name, [])
import logging from yithlibraryserver.user.analytics import get_google_analytics from yithlibraryserver.user.gravatar import get_gravatar from yithlibraryserver.user.idp import add_identity_provider from yithlibraryserver.user.models import User, ExternalIdentity from yithlibraryserver.user.security import get_user logger = logging.getLogger(__name__) def includeme(config): config.add_directive('add_identity_provider', add_identity_provider) config.add_request_method(get_user, 'user', reify=True) config.add_request_method(get_google_analytics, 'google_analytics', reify=True) config.add_request_method(get_gravatar, 'gravatar', reify=True) config.add_route('login', '/login') config.add_route('register_new_user', '/register') config.add_route('logout', '/logout') config.add_route('user_destroy', '/destroy') config.add_route('user_information', '/profile') config.add_route('user_preferences', '/preferences') config.add_route('user_identity_providers', '/identity-providers') config.add_route('user_send_email_verification_code', '/send-email-verification-code') config.add_route('user_verify_email', '/verify-email') config.add_route('user_google_analytics_preference', '/google-analytics-preference') config.add_route('user_view', '/user') logger.debug('Importing %s model so SQLAlchemy knows about it', User) logger.debug('Importing %s model so SQLAlchemy knows about it', ExternalIdentity)
import models import wizard
from django.core.urlresolvers import reverse, NoReverseMatch from django.forms.utils import flatatt from django.utils.html import escape, format_html from django.utils.safestring import mark_safe from django.utils.translation import ugettext as _ from .html import join_text, merge_new_words, render_tag def render_icon(icon, title=''): """Render a glyphicon. """ #? escape ? # attrs = {'class': 'glyphicon glyphicon-{}'.format(icon)} attrs = {'class': 'glyphicon glyphicon-' + icon} if title: attrs['title'] = _(title) return format_html('<span{0}></span>', flatatt(attrs)) def render_button(content, icon=None, style='default', size='', href='', title='', button_class='', attrs=None): """Render a button with content """ # <button type="button" class="btn btn-default">Default</button> # <button type="button" class="btn btn-primary">Primary</button> # <button type="button" class="btn btn-success">Success</button> # <button type="button" class="btn btn-info">Info</button> # <button type="button" class="btn btn-warning">Warning</button> # <button type="button" class="btn btn-danger">Danger</button> # <button type="button" class="btn btn-link">Link</button> # # size : btn-lg, btn-sm, btn-xs # <button type="button" class="btn btn-primary btn-lg">Large button</button> # # btn-block # <button type="button" class="btn btn-primary btn-lg btn-block">Block level button</button> # <button type="button" class="btn btn-default btn-lg btn-block">Block level button</button> # # active # <button type="button" class="btn btn-primary btn-lg active">Primary button</button> # <a href="#" class="btn btn-default btn-lg active" role="button">Link</a> # # disabled="disabled" # <button type="button" class="btn btn-lg btn-primary" disabled="disabled">Primary button</button> # <a href="#" class="btn btn-default btn-lg disabled" role="button">Link</a> # # <a class="btn btn-default" href="#" role="button">Link</a> # <button class="btn btn-default" type="submit">Button</button> # <input class="btn btn-default" type="button" value="Input"> # <input class="btn btn-default" type="submit" value="Submit"> if attrs is None: attrs = {} classes = ['btn'] button_styles = ('default', 'primary', 'success', 'info', 'warning', 'danger', 'link') if style in button_styles: classes.append('btn-' + style) else: raise ValueError('Parameter style must be {} ("{}" given)', ', '.join(button_styles), style) # size = text_value(size).lower().strip() if size: if size == 'xs': classes.append('btn-xs') elif size == 'sm' or size == 'small': classes.append('btn-sm') elif size == 'lg' or size == 'large': classes.append('btn-lg') else: raise ValueError('Parameter "size" should be "xs", "sm", "lg" or empty ("{}" given)', format(size)) attrs['class'] = merge_new_words(button_class, classes) if href: try: # current_app = context['request'].resolver_match.namespace # viewname=viewname, args=view_args, kwargs=view_kwargs, current_app=current_app url = reverse(href) except NoReverseMatch: url = href attrs['href'] = url tag = 'a' else: tag = 'button' if title: attrs['title'] = escape(_(title)) icon_content = render_icon(icon) if icon else '' if content: content = join_text((icon_content, escape(_(content))), separator=' ') else: content = icon_content return render_tag(tag, mark_safe(content), attrs=attrs) def render_icon_button(icon, **kwargs): return render_button(None, icon=icon, **kwargs) def render_modal_icon_button(icon, *args, **kwargs): attrs = {'data-toggle':'modal', 'data-target':join_text(args)} return render_button(None, icon=icon, attrs=attrs, **kwargs) def render_dismiss_button(title, **kwargs): attrs = {'type':'button', 'data-dismiss':'modal'} return render_button(title, attrs=attrs, **kwargs) def render_close_button(*args, **kwargs): # '<button type="button" class="close" data-dismiss="modal">' # '</button>' attrs = {'type':'button', 'class':'close', 'data-dismiss':'modal'} title = escape(_('Close')) content = ('<span aria-hidden="true">&times;</span>' '<span class="sr-only">{0}</span>'.format(title)) return render_tag('button', mark_safe(content), attrs=attrs)
{ 'name': 'Purchase Price List Item', 'version': '7.0.1.0.0', 'category': 'Purchase', 'sequence': 19, 'summary': 'Purchase Price List Item', 'description': """ Improve purchase price managment ================================ * In Purchase List Item, the price is fixed based on price_surchage if base is 'fixed on UOP' * If 'fixed on UOP', if product UOP change, the price list price will be change automtically. * Add field 'Qty on Hand', and 'Stock Values' for product * Add field 'Qty on Hand', 'Stock Values', UOP in product list view """, 'author': 'Elico Corp', 'website': 'https://www.elico-corp.com', 'images' : [], 'depends': ['purchase'], 'data': [ 'purchase_view.xml', ], 'test': [], 'demo': [], 'installable': True, 'auto_install': False, 'application': False, }
from flask import Flask, request from flask_bootstrap import Bootstrap from flask_babel import Babel import agherant from webserver_utils import gevent_run def create_app(conf): app = Flask(__name__) app.config.update(conf) Bootstrap(app) babel = Babel(app) app.register_blueprint(agherant.agherant, url_prefix='/agherant') @babel.localeselector def get_locale(): return request.accept_languages.best_match(['en', 'it', 'sq']) return app def main(conf={}): app = create_app(conf) gevent_run(app) if __name__ == '__main__': main()
import json from decimal import Decimal from django import forms class MoneyField(forms.DecimalField): def __init__(self, **kwargs): kwargs["decimal_places"] = 2 for f in ["min_value", "max_value"]: if f in kwargs: kwargs[f] = Decimal(kwargs[f]) / 100 super().__init__(**kwargs) def prepare_value(self, value): if isinstance(value, int): return Decimal(value) / 100 return value def clean(self, value): value = super().clean(value) return value and int(value * 100) class AskAmountField(forms.DecimalField): def __init__( self, *, amount_choices=None, show_tax_credit=True, by_month=False, **kwargs ): self.show_tax_credit = show_tax_credit self.by_month = by_month self._amount_choices = amount_choices super().__init__(**kwargs) if self.min_value is not None: self.widget.attrs.setdefault( "data-min-amount-error", self.error_messages["min_value"] ) if self.max_value is not None: self.widget.attrs.setdefault( "data-max-amount-error", self.error_messages["max_value"] ) self.widget.attrs.setdefault("data-by-month", self.by_month) @property def amount_choices(self): return self._amount_choices @amount_choices.setter def amount_choices(self, amount_choices): self._amount_choices = amount_choices if self.widget: self.widget.attrs["data-amount-choices"] = json.dumps(self._amount_choices) def widget_attrs(self, widget): attrs = super().widget_attrs(widget) if not self.show_tax_credit: attrs.setdefault("data-hide-tax-credit", "Y") if self.amount_choices is not None: attrs.setdefault("data-amount-choices", json.dumps(self.amount_choices)) return attrs
""" OpenStack - Tests """ import requests from collections import namedtuple from unittest.mock import Mock, call, patch from instance import openstack from instance.tests.base import TestCase class OpenStackTestCase(TestCase): """ Test cases for OpenStack helper functions """ def setUp(self): super().setUp() self.nova = Mock() def test_create_server(self): """ Create a VM via nova """ self.nova.flavors.find.return_value = 'test-flavor' self.nova.images.find.return_value = 'test-image' openstack.create_server(self.nova, 'test-vm', {"ram": 4096, "disk": 40}, {"name": "Ubuntu 12.04"}) self.assertEqual(self.nova.mock_calls, [ call.flavors.find(disk=40, ram=4096), call.images.find(name='Ubuntu 12.04'), call.servers.create('test-vm', 'test-image', 'test-flavor', key_name=None) ]) def test_delete_servers_by_name(self): """ Delete all servers with a given name """ server_class = namedtuple('server_class', 'name pk') self.nova.servers.list.return_value = [ server_class(name='server-a', pk=1), server_class(name='server-a', pk=2), server_class(name='server-b', pk=3), ] openstack.delete_servers_by_name(self.nova, 'server-a') self.assertEqual(self.nova.mock_calls, [ call.servers.list(), call.servers.delete(server_class(name='server-a', pk=1)), call.servers.delete(server_class(name='server-a', pk=2)), ]) def test_get_server_public_address_none(self): """ No public IP when none has been assigned yet """ server_class = namedtuple('Server', 'addresses') server = server_class(addresses=[]) self.assertEqual(openstack.get_server_public_address(server), None) @patch('requests.packages.urllib3.util.retry.Retry.sleep') @patch('http.client.HTTPConnection.getresponse') @patch('http.client.HTTPConnection.request') def test_nova_client_connection_error(self, mock_request, mock_getresponse, mock_retry_sleep): """ Connection error during a request from the nova client Ensure requests are retried before giving up, with a backoff sleep between attempts """ def getresponse_call(*args, **kwargs): """ Invoked by the nova client when making a HTTP request (via requests/urllib3) """ raise ConnectionResetError('[Errno 104] Connection reset by peer') mock_getresponse.side_effect = getresponse_call nova = openstack.get_nova_client() with self.assertRaises(requests.exceptions.ConnectionError): nova.servers.get('test-id') self.assertEqual(mock_getresponse.call_count, 11) self.assertEqual(mock_retry_sleep.call_count, 10)
from UM.Math.Float import Float # For fuzzy comparison of edge cases. class LineSegment(object): ## Creates a new line segment with the specified endpoints. # # \param endpoint_a An endpoint of the line segment. # \param endpoint_b An endpoint of the line segment. def __init__(self, endpoint_a, endpoint_b): self._endpoint_a = endpoint_a self._endpoint_b = endpoint_b ## Gets the second endpoint (B) of the line segment. # # \return The second endpoint of the line segment. def getEnd(self): return self._endpoint_b ## Gets the first endpoint (A) of the line segment. # # \return The first endpoint of the line segment. def getStart(self): return self._endpoint_a ## Returns the point of intersection of this line segment with another line # segment, if any. # # \param other The line segment to check intersection with. # \return The intersection point if they intersect, or None otherwise. def intersection(self, other): if not self.intersectsWithLine(other._endpoint_a, other._endpoint_b) or not other.intersectsWithLine(self._endpoint_a, self._endpoint_b): #Line segments don't intersect. return None direction_me = self._endpoint_b - self._endpoint_a direction_other = other._endpoint_b - other._endpoint_a diff_endpoint_a = self._endpoint_a - other._endpoint_a perpendicular = direction_me.perpendicular() denominator = perpendicular.dot(direction_other) #Project onto the perpendicular. numerator = perpendicular.dot(diff_endpoint_a) if denominator == 0: #Lines are parallel. return None return (numerator / denominator.astype(float)) * direction_other + other._endpoint_a ## Returns whether the line segment intersects the specified (infinite) # line. # # If the line segment touches the line with one or both endpoints, that # counts as an intersection too. # # \param a A point on the line to intersect with. # \param b A different point on the line to intersect with. # \return True if the line segment intersects with the line, or False # otherwise. def intersectsWithLine(self, a, b): shifted_b = b - a #It intersects if either endpoint is on the line, or if one endpoint is on the right but the other is not. return Float.fuzzyCompare(shifted_b.cross(self._endpoint_a), 0) or Float.fuzzyCompare(shifted_b.cross(self._endpoint_b), 0) or (self._pointIsRight(self._endpoint_a, a, b) != self._pointIsRight(self._endpoint_b, a, b)) ## Determines whether point p is to the right of the line through a and b. # # \param p The point to determine whether it is to the right of the line. # \param a A point on the line. # \param b Another point on the line. def _pointIsRight(self, p, a, b): shifted_end = b - a return shifted_end.cross(p - a) < 0
import os SITE_TITLE = 'Job Board' SQLALCHEMY_DATABASE_URI = 'postgresql:///hasjob_testing' SERVER_NAME = 'hasjob.travis.local:5000' LASTUSER_SERVER = 'https://hasgeek.com/' LASTUSER_CLIENT_ID = os.environ.get('LASTUSER_CLIENT_ID', '') LASTUSER_CLIENT_SECRET = os.environ.get('LASTUSER_CLIENT_SECRET', '') STATIC_SUBDOMAIN = 'static' ASSET_SERVER = 'https://static.hasgeek.co.in/' ASSET_MANIFEST_PATH = "static/build/manifest.json" ASSET_BASE_PATH = '/static/build'
from .pagemodels import * from .catalogmodels import * from .utilmodels import * from .usermodels import * from .dbconnect import Base, engine def init_models(): Base.metadata.create_all(engine)
import configparser import datetime import json import os.path import re import requests import youtube_dl def read_config(section, key): config = configparser.ConfigParser() config.read('config.cfg') return config[section][key] def is_invalid(date): try: datetime.datetime.strptime(date, '%Y-%m-%d') except ValueError: return "Incorrect date format, should be YYYY-MM-DD." else: return False class Apod: def __init__(self, *args): self.api_key = read_config('NASA_API', 'api_key') if args: self.date = args[0] else: self.date = '' self.date = self.ask_api()['date'] self.filename = 'data/' + self.date self.error = False self.consult() if not self.error: self.title = self.api_response['title'] self.media_type = self.api_response['media_type'] if self.media_type == 'image': self.hdlink = self.api_response['hdurl'] self.link = self.api_response['url'] self.explanation() def ask_api(self): baseurl = 'https://api.nasa.gov/planetary/apod' payload = {'api_key': self.api_key, 'date': self.date} r = requests.get(baseurl, params=payload) return r.json() def consult(self): if os.path.exists('data/' + self.date + '.json'): with open(self.filename + '.json', 'rt') as f: self.api_response = json.load(f) else: self.api_response = self.ask_api() if 'code' in self.api_response: if self.api_response['code'] == 400: self.error = self.api_response['msg'] else: self.error = self.api_response['code'] + ': ' + self.api_response['msg'] else: with open(self.filename + '.json', 'wt') as f: json.dump(self.api_response, f) def get_userpage(self): shortdate = self.date.replace('-', '') shortdate = shortdate[2:] url = 'https://apod.nasa.gov/apod/ap' + shortdate + '.html' payload = {} r = requests.get(url, params=payload) return r.text def scrap_explanation(self, pagesource): re_explanation = re.compile("Explanation: </b>(.*?)<p>", flags=re.DOTALL) # Compile regex for extracting explanation. explanation = re_explanation.search(pagesource).groups()[0] # Extract explanation. explanation = explanation.replace('/\n', '/') # Fix split URLs along several lines. explanation = explanation.replace('\n>', '>') # Fix split HTML tags. explanation = explanation.replace('<a/>', '</a>') # Fix typos (they seem to write the HTML by hand, yes). explanation = explanation.replace('\n', ' ') # Delete all newlines. explanation = re.sub('\s+', ' ', explanation).strip() # Substitute repeated spaces and strips the ones at the beginning and the end of the string. explanation = re.sub(r'<a([^>]*)href=["\'](?!http)([^"\']*)["\']([^>]*)>', r'<a\1href="https://apod.nasa.gov/apod/\2"\3>', explanation) # Change relative paths to absolute. return explanation def save_explanation(self, explanation): with open(self.filename + '.html', 'wt') as f: f.write(explanation) def explanation(self): filename = self.filename + '.html' if os.path.exists(filename): with open(filename, 'rt') as f: self.explanation = f.read() self.html = True else: try: userpage = self.get_userpage() explanation = self.scrap_explanation(userpage) except: explanation = self.api_response['explanation'] self.html = False else: self.save_explanation(explanation) self.html = True self.explanation = explanation
from essentia_test import * import numpy as np class TestTriangularBarkBands(TestCase): def InitTriangularBarkBands(self, nbands): return TriangularBarkBands(inputSize=1024, numberBands=nbands, lowFrequencyBound=0, highFrequencyBound=44100*.5) def testRegression(self): spectrum = [1]*1024 mbands = self.InitTriangularBarkBands(24)(spectrum) self.assertEqual(len(mbands), 24 ) self.assert_(not any(numpy.isnan(mbands))) self.assert_(not any(numpy.isinf(mbands))) self.assertAlmostEqualVector(mbands, [1]*24, 1e-5) mbands = self.InitTriangularBarkBands(128)(spectrum) self.assertEqual(len(mbands), 128 ) self.assert_(not any(numpy.isnan(mbands))) self.assert_(not any(numpy.isinf(mbands))) self.assertAlmostEqualVector(mbands, [1]*128, 1e-5) def testRegressionRastaMode(self): # Test the BFCC extractor compared to Rastamat specifications audio = MonoLoader(filename = join(testdata.audio_dir, 'recorded/vignesh.wav'), sampleRate = 44100)()*2**15 #Expected values generated in Rastamat/MATLAB expected = [ 20.28919141, 23.80362425, 26.69797305, 27.10461133, 26.64508125, 26.7758322, 27.1787682, 27.10699792, 26.29040982, 25.04243486, 24.24791966, 24.17377063, 24.61976518, 25.29554584, 24.87617598, 23.79018513, 23.04026225, 23.20707811, 23.09716777, 23.33050168, 22.8201923, 21.49477903, 21.63639095, 22.12937291, 22.01981441, 21.70728156] frameSize = 1102 hopSize = 441 fftsize = 2048 paddingSize = fftsize - frameSize spectrumSize = int(fftsize/2) + 1 w = Windowing(type = 'hann', size = frameSize, zeroPadding = paddingSize, normalized = False, zeroPhase = False) spectrum = Spectrum(size = fftsize) mbands = TriangularBarkBands(inputSize= spectrumSize, type = 'power', highFrequencyBound = 8000, lowFrequencyBound = 0, numberBands = 26, weighting = 'linear', normalize = 'unit_max') pool = Pool() for frame in FrameGenerator(audio, frameSize = frameSize, hopSize = hopSize, startFromZero = True, validFrameThresholdRatio = 1): pool.add('TriangularBarkBands', mbands(spectrum(w(frame)))) np.savetxt("out.csv", np.mean(np.log(pool['TriangularBarkBands']),0), delimiter=',') self.assertAlmostEqualVector( np.mean(np.log(pool['TriangularBarkBands']),0), expected,1e-2) def testZero(self): # Inputting zeros should return zero. Try with different sizes size = 1024 while (size >= 256 ): self.assertEqualVector(TriangularBarkBands()(zeros(size)), zeros(24)) size /= 2 def testInvalidInput(self): # mel bands should fail for a spectrum with less than 2 bins self.assertComputeFails(TriangularBarkBands(), []) self.assertComputeFails(TriangularBarkBands(), [0.5]) def testInvalidParam(self): self.assertConfigureFails(TriangularBarkBands(), { 'numberBands': 0 }) self.assertConfigureFails(TriangularBarkBands(), { 'numberBands': 1 }) self.assertConfigureFails(TriangularBarkBands(), { 'lowFrequencyBound': -100 }) self.assertConfigureFails(TriangularBarkBands(), { 'lowFrequencyBound': 100, 'highFrequencyBound': 50 }) self.assertConfigureFails(TriangularBarkBands(), { 'highFrequencyBound': 30000, 'sampleRate': 22050}) def testWrongInputSize(self): # This test makes sure that even though the inputSize given at # configure time does not match the input spectrum, the algorithm does # not crash and correctly resizes internal structures to avoid errors. spec = [.1,.4,.5,.2,.1,.01,.04]*100 np.savetxt("out.csv", TriangularBarkBands(inputSize=1024, sampleRate=10, highFrequencyBound=4)(spec), delimiter=',') self.assertAlmostEqualVector( TriangularBarkBands(inputSize=1024, sampleRate=10, highFrequencyBound=4)(spec), [0.0460643246769905]*24, 1e-6) """ def testNotEnoughSpectrumBins(self): self.assertConfigureFails(TriangularBarkBands(), {'numberBands': 256, 'inputSize': 1025}) """ suite = allTests(TestTriangularBarkBands) if __name__ == '__main__': TextTestRunner(verbosity=2).run(suite)
class Location(object): def __init__(self, x, y): self.x = x self.y = y def __add__(self, direction): return Location(self.x + direction.x, self.y + direction.y) def __sub__(self, direction): return Location(self.x - direction.x, self.y - direction.y) def __repr__(self): return 'Location({}, {})'.format(self.x, self.y) def __eq__(self, other): return self.x == other.x and self.y == other.y def __hash__(self): return hash((self.x, self.y))
import sys import os import signal import platform from PyQt5.QtCore import Qt, QObject, QCoreApplication, QEvent, pyqtSlot, QLocale, QTranslator, QLibraryInfo, QT_VERSION_STR, PYQT_VERSION_STR from PyQt5.QtQml import QQmlApplicationEngine, qmlRegisterType, qmlRegisterSingletonType from PyQt5.QtWidgets import QApplication, QSplashScreen from PyQt5.QtGui import QGuiApplication, QPixmap from PyQt5.QtCore import QTimer from UM.Application import Application from UM.Qt.QtRenderer import QtRenderer from UM.Qt.Bindings.Bindings import Bindings from UM.Signal import Signal, signalemitter from UM.Resources import Resources from UM.Logger import Logger from UM.Preferences import Preferences from UM.i18n import i18nCatalog import UM.Settings.InstanceContainer #For version upgrade to know the version number. import UM.Settings.ContainerStack #For version upgrade to know the version number. import UM.Preferences #For version upgrade to know the version number. class UnsupportedVersionError(Exception): pass major, minor = PYQT_VERSION_STR.split(".")[0:2] if int(major) < 5 or int(minor) < 4: raise UnsupportedVersionError("This application requires at least PyQt 5.4.0") @signalemitter class QtApplication(QApplication, Application): def __init__(self, **kwargs): plugin_path = "" if sys.platform == "win32": if hasattr(sys, "frozen"): plugin_path = os.path.join(os.path.dirname(os.path.abspath(sys.executable)), "PyQt5", "plugins") Logger.log("i", "Adding QT5 plugin path: %s" % (plugin_path)) QCoreApplication.addLibraryPath(plugin_path) else: import site for dir in site.getsitepackages(): QCoreApplication.addLibraryPath(os.path.join(dir, "PyQt5", "plugins")) elif sys.platform == "darwin": plugin_path = os.path.join(Application.getInstallPrefix(), "Resources", "plugins") if plugin_path: Logger.log("i", "Adding QT5 plugin path: %s" % (plugin_path)) QCoreApplication.addLibraryPath(plugin_path) os.environ["QSG_RENDER_LOOP"] = "basic" super().__init__(sys.argv, **kwargs) self._plugins_loaded = False #Used to determine when it's safe to use the plug-ins. self._main_qml = "main.qml" self._engine = None self._renderer = None self._main_window = None self._shutting_down = False self._qml_import_paths = [] self._qml_import_paths.append(os.path.join(os.path.dirname(sys.executable), "qml")) self._qml_import_paths.append(os.path.join(Application.getInstallPrefix(), "Resources", "qml")) self.setAttribute(Qt.AA_UseDesktopOpenGL) try: self._splash = self._createSplashScreen() except FileNotFoundError: self._splash = None else: self._splash.show() self.processEvents() signal.signal(signal.SIGINT, signal.SIG_DFL) # This is done here as a lot of plugins require a correct gl context. If you want to change the framework, # these checks need to be done in your <framework>Application.py class __init__(). i18n_catalog = i18nCatalog("uranium") self.showSplashMessage(i18n_catalog.i18nc("@info:progress", "Loading plugins...")) self._loadPlugins() self.parseCommandLine() Logger.log("i", "Command line arguments: %s", self._parsed_command_line) self._plugin_registry.checkRequiredPlugins(self.getRequiredPlugins()) self.showSplashMessage(i18n_catalog.i18nc("@info:progress", "Updating configuration...")) upgraded = UM.VersionUpgradeManager.VersionUpgradeManager.getInstance().upgrade() if upgraded: preferences = UM.Preferences.getInstance() #Preferences might have changed. Load them again. #Note that the language can't be updated, so that will always revert to English. try: preferences.readFromFile(Resources.getPath(Resources.Preferences, self._application_name + ".cfg")) except FileNotFoundError: pass self.showSplashMessage(i18n_catalog.i18nc("@info:progress", "Loading preferences...")) try: file = Resources.getPath(Resources.Preferences, self.getApplicationName() + ".cfg") Preferences.getInstance().readFromFile(file) except FileNotFoundError: pass def run(self): pass def hideMessage(self, message): with self._message_lock: if message in self._visible_messages: self._visible_messages.remove(message) self.visibleMessageRemoved.emit(message) def showMessage(self, message): with self._message_lock: if message not in self._visible_messages: self._visible_messages.append(message) message.setTimer(QTimer()) self.visibleMessageAdded.emit(message) def setMainQml(self, path): self._main_qml = path def initializeEngine(self): # TODO: Document native/qml import trickery Bindings.register() self._engine = QQmlApplicationEngine() for path in self._qml_import_paths: self._engine.addImportPath(path) if not hasattr(sys, "frozen"): self._engine.addImportPath(os.path.join(os.path.dirname(__file__), "qml")) self._engine.rootContext().setContextProperty("QT_VERSION_STR", QT_VERSION_STR) self._engine.rootContext().setContextProperty("screenScaleFactor", self._screenScaleFactor()) self.registerObjects(self._engine) self._engine.load(self._main_qml) self.engineCreatedSignal.emit() engineCreatedSignal = Signal() def isShuttingDown(self): return self._shutting_down def registerObjects(self, engine): pass def getRenderer(self): if not self._renderer: self._renderer = QtRenderer() return self._renderer def addCommandLineOptions(self, parser): parser.add_argument("--disable-textures", dest="disable-textures", action="store_true", default=False, help="Disable Qt texture loading as a workaround for certain crashes.") # Overridden from QApplication::setApplicationName to call our internal setApplicationName def setApplicationName(self, name): Application.setApplicationName(self, name) mainWindowChanged = Signal() def getMainWindow(self): return self._main_window def setMainWindow(self, window): if window != self._main_window: self._main_window = window self.mainWindowChanged.emit() # Handle a function that should be called later. def functionEvent(self, event): e = _QtFunctionEvent(event) QCoreApplication.postEvent(self, e) # Handle Qt events def event(self, event): if event.type() == _QtFunctionEvent.QtFunctionEvent: event._function_event.call() return True return super().event(event) def windowClosed(self): Logger.log("d", "Shutting down %s", self.getApplicationName()) self._shutting_down = True try: Preferences.getInstance().writeToFile(Resources.getStoragePath(Resources.Preferences, self.getApplicationName() + ".cfg")) except Exception as e: Logger.log("e", "Exception while saving preferences: %s", repr(e)) try: self.applicationShuttingDown.emit() except Exception as e: Logger.log("e", "Exception while emitting shutdown signal: %s", repr(e)) try: self.getBackend().close() except Exception as e: Logger.log("e", "Exception while closing backend: %s", repr(e)) self.quit() ## Load a Qt translation catalog. # # This method will locate, load and install a Qt message catalog that can be used # by Qt's translation system, like qsTr() in QML files. # # \param file The file name to load, without extension. It will be searched for in # the i18nLocation Resources directory. If it can not be found a warning # will be logged but no error will be thrown. # \param language The language to load translations for. This can be any valid language code # or 'default' in which case the language is looked up based on system locale. # If the specified language can not be found, this method will fall back to # loading the english translations file. # # \note When `language` is `default`, the language to load can be changed with the # environment variable "LANGUAGE". def loadQtTranslation(self, file, language = "default"): #TODO Add support for specifying a language from preferences path = None if language == "default": path = self._getDefaultLanguage(file) else: path = Resources.getPath(Resources.i18n, language, "LC_MESSAGES", file + ".qm") # If all else fails, fall back to english. if not path: Logger.log("w", "Could not find any translations matching {0} for file {1}, falling back to english".format(language, file)) try: path = Resources.getPath(Resources.i18n, "en", "LC_MESSAGES", file + ".qm") except FileNotFoundError: Logger.log("w", "Could not find English translations for file {0}. Switching to developer english.".format(file)) return translator = QTranslator() if not translator.load(path): Logger.log("e", "Unable to load translations %s", file) return # Store a reference to the translator. # This prevents the translator from being destroyed before Qt has a chance to use it. self._translators[file] = translator # Finally, install the translator so Qt can use it. self.installTranslator(translator) ## Display text on the splash screen. def showSplashMessage(self, message): if self._splash: self._splash.showMessage(message , Qt.AlignHCenter | Qt.AlignVCenter) self.processEvents() ## Close the splash screen after the application has started. def closeSplash(self): if self._splash: self._splash.close() self._splash = None def _createSplashScreen(self): return QSplashScreen(QPixmap(Resources.getPath(Resources.Images, self.getApplicationName() + ".png"))) def _screenScaleFactor(self): physical_dpi = QGuiApplication.primaryScreen().physicalDotsPerInch() # Typically 'normal' screens have a DPI around 96. Modern high DPI screens are up around 220. # We scale the low DPI screens with a traditional 1, and double the high DPI ones. return 1.0 if physical_dpi < 150 else 2.0 def _getDefaultLanguage(self, file): # If we have a language override set in the environment, try and use that. lang = os.getenv("URANIUM_LANGUAGE") if lang: try: return Resources.getPath(Resources.i18n, lang, "LC_MESSAGES", file + ".qm") except FileNotFoundError: pass # Else, try and get the current language from preferences lang = Preferences.getInstance().getValue("general/language") if lang: try: return Resources.getPath(Resources.i18n, lang, "LC_MESSAGES", file + ".qm") except FileNotFoundError: pass # If none of those are set, try to use the environment's LANGUAGE variable. lang = os.getenv("LANGUAGE") if lang: try: return Resources.getPath(Resources.i18n, lang, "LC_MESSAGES", file + ".qm") except FileNotFoundError: pass # If looking up the language from the enviroment or preferences fails, try and use Qt's system locale instead. locale = QLocale.system() # First, try and find a directory for any of the provided languages for lang in locale.uiLanguages(): try: return Resources.getPath(Resources.i18n, lang, "LC_MESSAGES", file + ".qm") except FileNotFoundError: pass # If that fails, see if we can extract a language "class" from the # preferred language. This will turn "en-GB" into "en" for example. lang = locale.uiLanguages()[0] lang = lang[0:lang.find("-")] try: return Resources.getPath(Resources.i18n, lang, "LC_MESSAGES", file + ".qm") except FileNotFoundError: pass return None class _QtFunctionEvent(QEvent): QtFunctionEvent = QEvent.User + 1 def __init__(self, fevent): super().__init__(self.QtFunctionEvent) self._function_event = fevent
"""Test Workbench Runtime""" from unittest import TestCase import mock from django.conf import settings from xblock.fields import Scope from xblock.runtime import KeyValueStore from xblock.runtime import KvsFieldData from xblock.reference.user_service import UserService from ..runtime import WorkbenchRuntime, ScenarioIdManager, WorkbenchDjangoKeyValueStore class TestScenarioIds(TestCase): """ Test XBlock Scenario IDs """ def setUp(self): # Test basic ID generation meets our expectations self.id_mgr = ScenarioIdManager() def test_no_scenario_loaded(self): self.assertEqual(self.id_mgr.create_definition("my_block"), ".my_block.d0") def test_should_increment(self): self.assertEqual(self.id_mgr.create_definition("my_block"), ".my_block.d0") self.assertEqual(self.id_mgr.create_definition("my_block"), ".my_block.d1") def test_slug_support(self): self.assertEqual( self.id_mgr.create_definition("my_block", "my_slug"), ".my_block.my_slug.d0" ) self.assertEqual( self.id_mgr.create_definition("my_block", "my_slug"), ".my_block.my_slug.d1" ) def test_scenario_support(self): self.test_should_increment() # Now that we have a scenario, our definition numbering starts over again. self.id_mgr.set_scenario("my_scenario") self.assertEqual(self.id_mgr.create_definition("my_block"), "my_scenario.my_block.d0") self.assertEqual(self.id_mgr.create_definition("my_block"), "my_scenario.my_block.d1") self.id_mgr.set_scenario("another_scenario") self.assertEqual(self.id_mgr.create_definition("my_block"), "another_scenario.my_block.d0") def test_usages(self): # Now make sure our usages are attached to definitions self.assertIsNone(self.id_mgr.last_created_usage_id()) self.assertEqual( self.id_mgr.create_usage("my_scenario.my_block.d0"), "my_scenario.my_block.d0.u0" ) self.assertEqual( self.id_mgr.create_usage("my_scenario.my_block.d0"), "my_scenario.my_block.d0.u1" ) self.assertEqual(self.id_mgr.last_created_usage_id(), "my_scenario.my_block.d0.u1") def test_asides(self): definition_id = self.id_mgr.create_definition('my_block') usage_id = self.id_mgr.create_usage(definition_id) aside_definition, aside_usage = self.id_mgr.create_aside(definition_id, usage_id, 'my_aside') self.assertEqual(self.id_mgr.get_aside_type_from_definition(aside_definition), 'my_aside') self.assertEqual(self.id_mgr.get_definition_id_from_aside(aside_definition), definition_id) self.assertEqual(self.id_mgr.get_aside_type_from_usage(aside_usage), 'my_aside') self.assertEqual(self.id_mgr.get_usage_id_from_aside(aside_usage), usage_id) class TestKVStore(TestCase): """ Test the Workbench KVP Store """ def setUp(self): self.kvs = WorkbenchDjangoKeyValueStore() self.key = KeyValueStore.Key( scope=Scope.content, user_id="rusty", block_scope_id="my_scenario.my_block.d0", field_name="age" ) def test_storage(self): self.assertFalse(self.kvs.has(self.key)) self.kvs.set(self.key, 7) self.assertTrue(self.kvs.has(self.key)) self.assertEqual(self.kvs.get(self.key), 7) self.kvs.delete(self.key) self.assertFalse(self.kvs.has(self.key)) class StubService(object): """Empty service to test loading additional services. """ pass class ExceptionService(object): """Stub service that raises an exception on init. """ def __init__(self): raise Exception("Kaboom!") class TestServices(TestCase): """ Test XBlock runtime services """ def setUp(self): super(TestServices, self).setUp() self.xblock = mock.Mock() def test_default_services(self): runtime = WorkbenchRuntime('test_user') self._assert_default_services(runtime) @mock.patch.dict(settings.WORKBENCH['services'], { 'stub': 'workbench.test.test_runtime.StubService' }) def test_settings_adds_services(self): runtime = WorkbenchRuntime('test_user') # Default services should still be available self._assert_default_services(runtime) # An additional service should be provided self._assert_service(runtime, 'stub', StubService) # Check that the service has the runtime attribute set service = runtime.service(self.xblock, 'stub') self.assertIs(service.runtime, runtime) @mock.patch.dict(settings.WORKBENCH['services'], { 'not_found': 'workbench.test.test_runtime.NotFoundService' }) def test_could_not_find_service(self): runtime = WorkbenchRuntime('test_user') # Default services should still be available self._assert_default_services(runtime) # The additional service should NOT be available self.assertIs(runtime.service(self.xblock, 'not_found'), None) @mock.patch.dict(settings.WORKBENCH['services'], { 'exception': 'workbench.test.test_runtime.ExceptionService' }) def test_runtime_service_initialization_failed(self): runtime = WorkbenchRuntime('test_user') # Default services should still be available self._assert_default_services(runtime) # The additional service should NOT be available self.assertIs(runtime.service(self.xblock, 'exception'), None) def _assert_default_services(self, runtime): """Check that the default services are available. """ self._assert_service(runtime, 'field-data', KvsFieldData) self._assert_service(runtime, 'user', UserService) def _assert_service(self, runtime, service_name, service_class): """Check that a service is loaded. """ service_instance = runtime.service(self.xblock, service_name) self.assertIsInstance(service_instance, service_class)
""" Asset compilation and collection. """ from __future__ import print_function import argparse from paver.easy import sh, path, task, cmdopts, needs, consume_args, call_task from watchdog.observers import Observer from watchdog.events import PatternMatchingEventHandler import glob import traceback from .utils.envs import Env from .utils.cmd import cmd, django_cmd COFFEE_DIRS = ['lms', 'cms', 'common'] SASS_LOAD_PATHS = ['./common/static/sass'] SASS_UPDATE_DIRS = ['*/static'] SASS_CACHE_PATH = '/tmp/sass-cache' class CoffeeScriptWatcher(PatternMatchingEventHandler): """ Watches for coffeescript changes """ ignore_directories = True patterns = ['*.coffee'] def register(self, observer): """ register files with observer """ dirnames = set() for filename in sh(coffeescript_files(), capture=True).splitlines(): dirnames.add(path(filename).dirname()) for dirname in dirnames: observer.schedule(self, dirname) def on_modified(self, event): print('\tCHANGED:', event.src_path) try: compile_coffeescript(event.src_path) except Exception: # pylint: disable=W0703 traceback.print_exc() class SassWatcher(PatternMatchingEventHandler): """ Watches for sass file changes """ ignore_directories = True patterns = ['*.scss'] ignore_patterns = ['common/static/xmodule/*'] def register(self, observer): """ register files with observer """ for dirname in SASS_LOAD_PATHS + SASS_UPDATE_DIRS + theme_sass_paths(): paths = [] if '*' in dirname: paths.extend(glob.glob(dirname)) else: paths.append(dirname) for dirname in paths: observer.schedule(self, dirname, recursive=True) def on_modified(self, event): print('\tCHANGED:', event.src_path) try: compile_sass() except Exception: # pylint: disable=W0703 traceback.print_exc() class XModuleSassWatcher(SassWatcher): """ Watches for sass file changes """ ignore_directories = True ignore_patterns = [] def register(self, observer): """ register files with observer """ observer.schedule(self, 'common/lib/xmodule/', recursive=True) def on_modified(self, event): print('\tCHANGED:', event.src_path) try: process_xmodule_assets() except Exception: # pylint: disable=W0703 traceback.print_exc() def theme_sass_paths(): """ Return the a list of paths to the theme's sass assets, or an empty list if no theme is configured. """ edxapp_env = Env() if edxapp_env.feature_flags.get('USE_CUSTOM_THEME', False): theme_name = edxapp_env.env_tokens.get('THEME_NAME', '') parent_dir = path(edxapp_env.REPO_ROOT).abspath().parent theme_root = parent_dir / "themes" / theme_name return [theme_root / "static" / "sass"] else: return [] def coffeescript_files(): """ return find command for paths containing coffee files """ dirs = " ".join([Env.REPO_ROOT / coffee_dir for coffee_dir in COFFEE_DIRS]) return cmd('find', dirs, '-type f', '-name \"*.coffee\"') def compile_coffeescript(*files): """ Compile CoffeeScript to JavaScript. """ if not files: files = ["`{}`".format(coffeescript_files())] sh(cmd( "node_modules/.bin/coffee", "--compile", *files )) def compile_sass(debug=False): """ Compile Sass to CSS. """ theme_paths = theme_sass_paths() sh(cmd( 'sass', '' if debug else '--style compressed', "--cache-location {cache}".format(cache=SASS_CACHE_PATH), "--load-path", " ".join(SASS_LOAD_PATHS + theme_paths), "--update", "-E", "utf-8", " ".join(SASS_UPDATE_DIRS + theme_paths) )) def compile_templated_sass(systems, settings): """ Render Mako templates for Sass files. `systems` is a list of systems (e.g. 'lms' or 'studio' or both) `settings` is the Django settings module to use. """ for sys in systems: sh(django_cmd(sys, settings, 'preprocess_assets')) def process_xmodule_assets(): """ Process XModule static assets. """ sh('xmodule_assets common/static/xmodule') def collect_assets(systems, settings): """ Collect static assets, including Django pipeline processing. `systems` is a list of systems (e.g. 'lms' or 'studio' or both) `settings` is the Django settings module to use. """ for sys in systems: sh(django_cmd(sys, settings, "collectstatic --noinput > /dev/null")) @task @cmdopts([('background', 'b', 'Background mode')]) def watch_assets(options): """ Watch for changes to asset files, and regenerate js/css """ observer = Observer() CoffeeScriptWatcher().register(observer) SassWatcher().register(observer) XModuleSassWatcher().register(observer) print("Starting asset watcher...") observer.start() if not getattr(options, 'background', False): # when running as a separate process, the main thread needs to loop # in order to allow for shutdown by contrl-c try: while True: observer.join(2) except KeyboardInterrupt: observer.stop() print("\nStopped asset watcher.") @task @needs('pavelib.prereqs.install_prereqs') @consume_args def update_assets(args): """ Compile CoffeeScript and Sass, then collect static assets. """ parser = argparse.ArgumentParser(prog='paver update_assets') parser.add_argument( 'system', type=str, nargs='*', default=['lms', 'studio'], help="lms or studio", ) parser.add_argument( '--settings', type=str, default="dev", help="Django settings module", ) parser.add_argument( '--debug', action='store_true', default=False, help="Disable Sass compression", ) parser.add_argument( '--skip-collect', dest='collect', action='store_false', default=True, help="Skip collection of static assets", ) parser.add_argument( '--watch', action='store_true', default=False, help="Watch files for changes", ) args = parser.parse_args(args) compile_templated_sass(args.system, args.settings) process_xmodule_assets() compile_coffeescript() compile_sass(args.debug) if args.collect: collect_assets(args.system, args.settings) if args.watch: call_task('watch_assets', options={'background': not args.debug})
from . import orderpoint_generator
"""Tests of the oopsreferences core.""" __metaclass__ = type from datetime import ( datetime, timedelta, ) from pytz import utc from lp.registry.model.oopsreferences import referenced_oops from lp.services.database.interfaces import IStore from lp.services.messages.model.message import ( Message, MessageSet, ) from lp.testing import ( person_logged_in, TestCaseWithFactory, ) from lp.testing.layers import DatabaseFunctionalLayer class TestOopsReferences(TestCaseWithFactory): layer = DatabaseFunctionalLayer def setUp(self): super(TestOopsReferences, self).setUp() self.store = IStore(Message) def test_oops_in_messagechunk(self): oopsid = "OOPS-abcdef1234" MessageSet().fromText('foo', "foo %s bar" % oopsid) self.store.flush() now = datetime.now(tz=utc) day = timedelta(days=1) self.failUnlessEqual( set([oopsid]), referenced_oops(now - day, now, "product=1", {})) self.failUnlessEqual( set(), referenced_oops(now + day, now + day, "product=1", {})) def test_oops_in_messagesubject(self): oopsid = "OOPS-abcdef1234" self.factory.makeEmailMessage() MessageSet().fromText("Crash with %s" % oopsid, "body") self.store.flush() now = datetime.now(tz=utc) day = timedelta(days=1) self.failUnlessEqual( set([oopsid]), referenced_oops(now - day, now, "product=1", {})) self.failUnlessEqual( set(), referenced_oops(now + day, now + day, "product=1", {})) def test_oops_in_bug_title(self): oopsid = "OOPS-abcdef1234" bug = self.factory.makeBug() with person_logged_in(bug.owner): bug.title = "Crash with %s" % oopsid self.store.flush() now = datetime.now(tz=utc) day = timedelta(days=1) self.failUnlessEqual( set([oopsid]), referenced_oops(now - day, now, "product=1", {})) self.failUnlessEqual( set(), referenced_oops(now + day, now + day, "product=1", {})) def test_oops_in_bug_description(self): oopsid = "OOPS-abcdef1234" bug = self.factory.makeBug() with person_logged_in(bug.owner): bug.description = "Crash with %s" % oopsid self.store.flush() now = datetime.now(tz=utc) day = timedelta(days=1) self.failUnlessEqual( set([oopsid]), referenced_oops(now - day, now, "product=1", {})) self.failUnlessEqual( set(), referenced_oops(now + day, now + day, "product=1", {})) def test_oops_in_question_title(self): oopsid = "OOPS-abcdef1234" question = self.factory.makeQuestion(title="Crash with %s" % oopsid) self.store.flush() now = datetime.now(tz=utc) day = timedelta(days=1) self.failUnlessEqual( set([oopsid]), referenced_oops(now - day, now, "product=%(product)s", {'product': question.product.id})) self.failUnlessEqual( set([]), referenced_oops(now + day, now + day, "product=%(product)s", {'product': question.product.id})) def test_oops_in_question_wrong_context(self): oopsid = "OOPS-abcdef1234" question = self.factory.makeQuestion(title="Crash with %s" % oopsid) self.store.flush() now = datetime.now(tz=utc) day = timedelta(days=1) self.store.flush() self.failUnlessEqual( set(), referenced_oops(now - day, now, "product=%(product)s", {'product': question.product.id + 1})) def test_oops_in_question_description(self): oopsid = "OOPS-abcdef1234" question = self.factory.makeQuestion( description="Crash with %s" % oopsid) self.store.flush() now = datetime.now(tz=utc) day = timedelta(days=1) self.failUnlessEqual( set([oopsid]), referenced_oops(now - day, now, "product=%(product)s", {'product': question.product.id})) self.failUnlessEqual( set([]), referenced_oops(now + day, now + day, "product=%(product)s", {'product': question.product.id})) def test_oops_in_question_whiteboard(self): oopsid = "OOPS-abcdef1234" question = self.factory.makeQuestion() with person_logged_in(question.owner): question.whiteboard = "Crash with %s" % oopsid self.store.flush() now = datetime.now(tz=utc) day = timedelta(days=1) self.failUnlessEqual( set([oopsid]), referenced_oops(now - day, now, "product=%(product)s", {'product': question.product.id})) self.failUnlessEqual( set([]), referenced_oops(now + day, now + day, "product=%(product)s", {'product': question.product.id})) def test_oops_in_question_distribution(self): oopsid = "OOPS-abcdef1234" distro = self.factory.makeDistribution() question = self.factory.makeQuestion(target=distro) with person_logged_in(question.owner): question.whiteboard = "Crash with %s" % oopsid self.store.flush() now = datetime.now(tz=utc) day = timedelta(days=1) self.failUnlessEqual( set([oopsid]), referenced_oops(now - day, now, "distribution=%(distribution)s", {'distribution': distro.id})) self.failUnlessEqual( set([]), referenced_oops(now + day, now + day, "distribution=%(distribution)s", {'distribution': distro.id})) def test_referenced_oops_in_urls_bug_663249(self): # Sometimes OOPS ids appears as part of an URL. These should could as # a reference even though they are not formatted specially - this # requires somewhat special handling in the reference calculation # function. oopsid_old = "OOPS-abcdef1234" oopsid_new = "OOPS-4321" bug_old = self.factory.makeBug() bug_new = self.factory.makeBug() with person_logged_in(bug_old.owner): bug_old.description = ( "foo https://lp-oops.canonical.com/oops.py?oopsid=%s bar" % oopsid_old) with person_logged_in(bug_new.owner): bug_new.description = ( "foo https://oops.canonical.com/oops.py?oopsid=%s bar" % oopsid_new) self.store.flush() now = datetime.now(tz=utc) day = timedelta(days=1) self.failUnlessEqual( set([oopsid_old, oopsid_new]), referenced_oops(now - day, now, "product=1", {})) self.failUnlessEqual( set([]), referenced_oops(now + day, now + day, "product=1", {}))
{ "name": "Mozaik Mass Mailing Access Rights", "summary": """ New group: Mass Mailing Manager. Managers can edit and unlink mass mailings.""", "version": "14.0.1.0.0", "license": "AGPL-3", "author": "ACSONE SA/NV", "website": "https://github.com/OCA/mozaik", "depends": [ "mass_mailing", ], "data": [ "security/groups.xml", "security/ir.model.access.csv", "views/mailing_mailing.xml", "views/mail_template.xml", ], "demo": [], }
import typing import math import contextlib from timeit import default_timer from operator import itemgetter from searx.engines import engines from .models import HistogramStorage, CounterStorage from .error_recorder import count_error, count_exception, errors_per_engines __all__ = ["initialize", "get_engines_stats", "get_engine_errors", "histogram", "histogram_observe", "histogram_observe_time", "counter", "counter_inc", "counter_add", "count_error", "count_exception"] ENDPOINTS = {'search'} histogram_storage: typing.Optional[HistogramStorage] = None counter_storage: typing.Optional[CounterStorage] = None @contextlib.contextmanager def histogram_observe_time(*args): h = histogram_storage.get(*args) before = default_timer() yield before duration = default_timer() - before if h: h.observe(duration) else: raise ValueError("histogram " + repr((*args,)) + " doesn't not exist") def histogram_observe(duration, *args): histogram_storage.get(*args).observe(duration) def histogram(*args, raise_on_not_found=True): h = histogram_storage.get(*args) if raise_on_not_found and h is None: raise ValueError("histogram " + repr((*args,)) + " doesn't not exist") return h def counter_inc(*args): counter_storage.add(1, *args) def counter_add(value, *args): counter_storage.add(value, *args) def counter(*args): return counter_storage.get(*args) def initialize(engine_names=None): """ Initialize metrics """ global counter_storage, histogram_storage # pylint: disable=global-statement counter_storage = CounterStorage() histogram_storage = HistogramStorage() # max_timeout = max of all the engine.timeout max_timeout = 2 for engine_name in (engine_names or engines): if engine_name in engines: max_timeout = max(max_timeout, engines[engine_name].timeout) # histogram configuration histogram_width = 0.1 histogram_size = int(1.5 * max_timeout / histogram_width) # engines for engine_name in (engine_names or engines): # search count counter_storage.configure('engine', engine_name, 'search', 'count', 'sent') counter_storage.configure('engine', engine_name, 'search', 'count', 'successful') # global counter of errors counter_storage.configure('engine', engine_name, 'search', 'count', 'error') # score of the engine counter_storage.configure('engine', engine_name, 'score') # result count per requests histogram_storage.configure(1, 100, 'engine', engine_name, 'result', 'count') # time doing HTTP requests histogram_storage.configure(histogram_width, histogram_size, 'engine', engine_name, 'time', 'http') # total time # .time.request and ...response times may overlap .time.http time. histogram_storage.configure(histogram_width, histogram_size, 'engine', engine_name, 'time', 'total') def get_engine_errors(engline_name_list): result = {} engine_names = list(errors_per_engines.keys()) engine_names.sort() for engine_name in engine_names: if engine_name not in engline_name_list: continue error_stats = errors_per_engines[engine_name] sent_search_count = max(counter('engine', engine_name, 'search', 'count', 'sent'), 1) sorted_context_count_list = sorted(error_stats.items(), key=lambda context_count: context_count[1]) r = [] for context, count in sorted_context_count_list: percentage = round(20 * count / sent_search_count) * 5 r.append({ 'filename': context.filename, 'function': context.function, 'line_no': context.line_no, 'code': context.code, 'exception_classname': context.exception_classname, 'log_message': context.log_message, 'log_parameters': context.log_parameters, 'secondary': context.secondary, 'percentage': percentage, }) result[engine_name] = sorted(r, reverse=True, key=lambda d: d['percentage']) return result def get_reliabilities(engline_name_list, checker_results): reliabilities = {} engine_errors = get_engine_errors(engline_name_list) for engine_name in engline_name_list: checker_result = checker_results.get(engine_name, {}) checker_success = checker_result.get('success', True) errors = engine_errors.get(engine_name) or [] if counter('engine', engine_name, 'search', 'count', 'sent') == 0: # no request reliablity = None elif checker_success and not errors: reliablity = 100 elif 'simple' in checker_result.get('errors', {}): # the basic (simple) test doesn't work: the engine is broken accoding to the checker # even if there is no exception reliablity = 0 else: reliablity = 100 - sum([error['percentage'] for error in errors if not error.get('secondary')]) reliabilities[engine_name] = { 'reliablity': reliablity, 'errors': errors, 'checker': checker_results.get(engine_name, {}).get('errors', {}), } return reliabilities def get_engines_stats(engine_name_list): assert counter_storage is not None assert histogram_storage is not None list_time = [] max_time_total = max_result_count = None for engine_name in engine_name_list: sent_count = counter('engine', engine_name, 'search', 'count', 'sent') if sent_count == 0: continue result_count = histogram('engine', engine_name, 'result', 'count').percentage(50) result_count_sum = histogram('engine', engine_name, 'result', 'count').sum successful_count = counter('engine', engine_name, 'search', 'count', 'successful') time_total = histogram('engine', engine_name, 'time', 'total').percentage(50) max_time_total = max(time_total or 0, max_time_total or 0) max_result_count = max(result_count or 0, max_result_count or 0) stats = { 'name': engine_name, 'total': None, 'total_p80': None, 'total_p95': None, 'http': None, 'http_p80': None, 'http_p95': None, 'processing': None, 'processing_p80': None, 'processing_p95': None, 'score': 0, 'score_per_result': 0, 'result_count': result_count, } if successful_count and result_count_sum: score = counter('engine', engine_name, 'score') stats['score'] = score stats['score_per_result'] = score / float(result_count_sum) time_http = histogram('engine', engine_name, 'time', 'http').percentage(50) time_http_p80 = time_http_p95 = 0 if time_http is not None: time_http_p80 = histogram('engine', engine_name, 'time', 'http').percentage(80) time_http_p95 = histogram('engine', engine_name, 'time', 'http').percentage(95) stats['http'] = round(time_http, 1) stats['http_p80'] = round(time_http_p80, 1) stats['http_p95'] = round(time_http_p95, 1) if time_total is not None: time_total_p80 = histogram('engine', engine_name, 'time', 'total').percentage(80) time_total_p95 = histogram('engine', engine_name, 'time', 'total').percentage(95) stats['total'] = round(time_total, 1) stats['total_p80'] = round(time_total_p80, 1) stats['total_p95'] = round(time_total_p95, 1) stats['processing'] = round(time_total - (time_http or 0), 1) stats['processing_p80'] = round(time_total_p80 - time_http_p80, 1) stats['processing_p95'] = round(time_total_p95 - time_http_p95, 1) list_time.append(stats) return { 'time': list_time, 'max_time': math.ceil(max_time_total or 0), 'max_result_count': math.ceil(max_result_count or 0), }
import os import geoip2.database from geoip2.errors import AddressNotFoundError from cortexutils.analyzer import Analyzer class MaxMindAnalyzer(Analyzer): def dump_city(self, city): return { 'confidence': city.confidence, 'geoname_id': city.geoname_id, 'name': city.name, 'names': city.names } def dump_continent(self, continent): return { 'code': continent.code, 'geoname_id': continent.geoname_id, 'name': continent.name, 'names': continent.names, } def dump_country(self, country): return { 'confidence': country.confidence, 'geoname_id': country.geoname_id, 'iso_code': country.iso_code, 'name': country.name, 'names': country.names } def dump_location(self, location): return { 'accuracy_radius': location.accuracy_radius, 'latitude': location.latitude, 'longitude': location.longitude, 'metro_code': location.metro_code, 'time_zone': location.time_zone } def dump_traits(self, traits): return { 'autonomous_system_number': traits.autonomous_system_number, 'autonomous_system_organization': traits.autonomous_system_organization, 'domain': traits.domain, 'ip_address': traits.ip_address, 'is_anonymous_proxy': traits.is_anonymous_proxy, 'is_satellite_provider': traits.is_satellite_provider, 'isp': traits.isp, 'organization': traits.organization, 'user_type': traits.user_type } def summary(self, raw): taxonomies = [] level = "info" namespace = "MaxMind" predicate = "Location" if "continent" in raw: value = "{}/{}".format(raw["country"]["name"], raw["continent"]["name"]) taxonomies.append(self.build_taxonomy(level, namespace, predicate, value)) return {"taxonomies": taxonomies} def run(self): Analyzer.run(self) if self.data_type == 'ip': try: data = self.get_data() city = geoip2.database.Reader(os.path.dirname(__file__) + '/GeoLite2-City.mmdb').city(data) self.report({ 'city': self.dump_city(city.city), 'continent': self.dump_continent(city.continent), 'country': self.dump_country(city.country), 'location': self.dump_location(city.location), 'registered_country': self.dump_country(city.registered_country), 'represented_country': self.dump_country(city.represented_country), 'subdivisions': self.dump_country(city.subdivisions.most_specific), 'traits': self.dump_traits(city.traits) }) except ValueError as e: self.error('Invalid IP address') except AddressNotFoundError as e: self.error('Unknown IP address') except Exception as e: self.unexpectedError(type(e)) else: self.notSupported() if __name__ == '__main__': MaxMindAnalyzer().run()
from contextlib import suppress from ereuse_devicehub.resources.account.domain import AccountDomain, UserNotFound from ereuse_devicehub.resources.device.domain import DeviceDomain def materialize_actual_owners_remove(events: list): for event in events: properties = {'$pull': {'owners': event['from']}} DeviceDomain.update_raw(event.get('components', []), properties) return DeviceDomain.update_raw(event['devices'], properties) def set_organization(deallocates: list): for deallocate in deallocates: with suppress(UserNotFound, KeyError): # todo ensure organization is not always needed deallocate['fromOrganization'] = AccountDomain.get_one(deallocate['from'])['organization']
import re import sys INFINITY = float('inf') line_to_ints = lambda line: [int(x, 10) for x in re.split(' *', line)] def solver(width, height, grid): prevs = [[0, list()] for _ in range(width)] for (y, line) in enumerate(grid): #print(line) currents = [] for (x,cost) in enumerate(line): if cost < 0: cost = INFINITY cur = [INFINITY, []] for dx in [-1,0,1]: if x+dx < 0 or x+dx >= width: continue #print(x,x+dx) tmp = prevs[x+dx] if tmp[0]+cost < cur[0]: cur[0] = tmp[0] + cost cur[1] = tmp[1] + [x] currents.append(cur) prevs = currents solution = min([x for (x,_) in prevs]) if solution == INFINITY: solution = -1 return solution def main(): lines = sys.stdin.read().split('\n') (height, width) = line_to_ints(lines[0]) #print(height,width) grid = [line_to_ints(line) for line in lines[1:height+1]] print(solver(width, height, grid)) if __name__ == '__main__': main()
""" API definition """ from tastypie import fields from tastypie.resources import ModelResource from tastypie.throttle import BaseThrottle from cotetra.survey.models import Journey, Connection from cotetra.network.api import StationResource class JourneyResource(ModelResource): """ The journeys """ station_from = fields.ForeignKey(StationResource, 'station_from') station_to = fields.ForeignKey(StationResource, 'station_to') class Meta: queryset = Journey.objects.all() resource_name = 'journey' throttle = BaseThrottle(throttle_at=100, timeframe=60) class ConnectionResource(ModelResource): """ The connections """ station_from = fields.ForeignKey(StationResource, 'station_from') station_to = fields.ForeignKey(StationResource, 'station_to') class Meta: queryset = Connection.objects.all() resource_name = 'connection' throttle = BaseThrottle(throttle_at=100, timeframe=60)
SECRET_KEY = ''
import json import xml.etree.ElementTree as ET import urllib2 import werkzeug.utils from openerp.addons.web import http from openerp.addons.web.http import request from openerp.addons.website.controllers.main import Website class Website(Website): @http.route(['/<path:seo_url>'], type='http', auth="public", website=True) def path_page(self, seo_url, **kwargs): """Handle SEO urls for ir.ui.views. ToDo: Add additional check for field seo_url_parent. Otherwise it is possible to use invalid url structures. For example: if you have two pages 'study-1' and 'study-2' with the same seo_url_level and different seo_url_parent you can use '/ecommerce/study-1/how-to-do-it-right' and '/ecommerce/study-2/how-to-do-it-right' to call the page 'how-to-do-it-right'. """ env = request.env(context=request.context) seo_url_parts = [s.encode('utf8') for s in seo_url.split('/') if s != ''] views = env['ir.ui.view'].search([('seo_url', 'in', seo_url_parts)], order='seo_url_level ASC') page = 'website.404' if len(seo_url_parts) == len(views): seo_url_check = [v.seo_url.encode('utf8') for v in views] current_view = views[-1] if (seo_url_parts == seo_url_check and (current_view.seo_url_level + 1) == len(views)): page = current_view.xml_id if page == 'website.404': try: url = self.look_for_redirect_url(seo_url, **kwargs) if url: return request.redirect(url, code=301) assert url is not None except Exception, e: return request.registry['ir.http']._handle_exception(e, 404) if page == 'website.404' and request.website.is_publisher(): page = 'website.page_404' return request.render(page, {}) def look_for_redirect_url(self, seo_url, **kwargs): env = request.env(context=request.context) if not seo_url.startswith('/'): seo_url = '/' + seo_url lang = env.context.get('lang', False) if not lang: lang = request.website.default_lang_code lang = env['res.lang'].get_code_from_alias(lang) domain = [('url', '=', seo_url), ('lang', '=', lang)] data = env['website.seo.redirect'].search(domain) if data: model, rid = data[0].resource.split(',') resource = env[model].browse(int(rid)) return resource.get_seo_path()[0] @http.route() def page(self, page, **opt): try: view = request.website.get_template(page) if view.seo_url: return request.redirect(view.get_seo_path()[0], code=301) except: pass return super(Website, self).page(page, **opt) @http.route(['/website/seo_suggest'], type='json', auth='user', website=True) def seo_suggest(self, keywords=None, lang=None): url = "http://google.com/complete/search" try: params = { 'ie': 'utf8', 'oe': 'utf8', 'output': 'toolbar', 'q': keywords, } if lang: language = lang.split("_") params.update({ 'hl': language[0], 'gl': language[1] if len(language) > 1 else '' }) req = urllib2.Request("%s?%s" % (url, werkzeug.url_encode(params))) request = urllib2.urlopen(req) except (urllib2.HTTPError, urllib2.URLError): # TODO: shouldn't this return {} ? return [] xmlroot = ET.fromstring(request.read()) return [sugg[0].attrib['data'] for sugg in xmlroot if len(sugg) and sugg[0].attrib['data']]
def test_imprint(app, client): app.config["SKYLINES_IMPRINT"] = u"foobar" res = client.get("/imprint") assert res.status_code == 200 assert res.json == {u"content": u"foobar"} def test_team(client): res = client.get("/team") assert res.status_code == 200 content = res.json["content"] assert "## Developers" in content assert "* Tobias Bieniek (<tobias.bieniek@gmx.de> // maintainer)\n" in content assert "## Developers" in content def test_license(client): res = client.get("/license") assert res.status_code == 200 content = res.json["content"] assert "GNU AFFERO GENERAL PUBLIC LICENSE" in content
""" Django settings for kore project. For more information on this file, see https://docs.djangoproject.com/en/1.7/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.7/ref/settings/ """ import os BASE_DIR = os.path.dirname(os.path.dirname(__file__)) SECRET_KEY = '9j++(0=dc&6w&113d4bofcjy1xy-pe$frla&=s*8w94=0ym0@&' DEBUG = True ALLOWED_HOSTS = [] INSTALLED_APPS = [ 'grappelli', 'nested_admin', 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'django.contrib.gis', 'raven.contrib.django.raven_compat', 'django_extensions', 'rest_framework', 'corsheaders', 'modeltranslation', 'leaflet', 'munigeo', 'schools', 'django_filters' ] if DEBUG: # INSTALLED_APPS.insert(0, 'devserver') # INSTALLED_APPS.insert(0, 'debug_toolbar') pass MIDDLEWARE_CLASSES = ( 'django.middleware.locale.LocaleMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'corsheaders.middleware.CorsMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = 'kore.urls' WSGI_APPLICATION = 'kore.wsgi.application' DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'NAME': 'kore', } } PROJECTION_SRID = 3067 DEFAULT_COUNTRY = 'fi' DEFAULT_OCD_MUNICIPALITY = 'kunta' BOUNDING_BOX = [-548576, 6291456, 1548576, 8388608] LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True gettext = lambda s: s LANGUAGES = ( ('fi', gettext('Finnish')), ('sv', gettext('Swedish')), ('en', gettext('English')), ) STATIC_URL = '/static/' STATIC_ROOT = os.path.join(BASE_DIR, "var", "static") LOCALE_PATH = os.path.join(BASE_DIR, "schools", "locale") REST_FRAMEWORK = { 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination', 'PAGE_SIZE': 20, 'MAX_PAGINATE_BY': 1000, # Maximum limit allowed when using `?page_size=xxx`. 'DEFAULT_FILTER_BACKENDS': ('rest_framework.filters.DjangoFilterBackend',), 'DEFAULT_RENDERER_CLASSES': ( 'rest_framework.renderers.JSONRenderer', 'rest_framework.renderers.BrowsableAPIRenderer', ) } TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [ os.path.join(BASE_DIR, 'templates'), ], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.contrib.auth.context_processors.auth', 'django.template.context_processors.debug', 'django.template.context_processors.i18n', 'django.template.context_processors.media', 'django.template.context_processors.static', 'django.template.context_processors.tz', 'django.contrib.messages.context_processors.messages', ], }, }, ] CORS_ORIGIN_ALLOW_ALL = True try: from local_settings import * except ImportError: pass
{ "name": "Mozaik Website Event Track", "summary": """ This module allows to see the event menu configuration even without activated debug mode""", "version": "14.0.1.0.0", "license": "AGPL-3", "author": "ACSONE SA/NV", "website": "https://github.com/OCA/mozaik", "depends": [ # Odoo "website_event_track", ], "data": [ "views/event_event.xml", ], }
""" Mappers ======= Mappers are the ConnectorUnit classes responsible to transform external records into OpenERP records and conversely. """ import logging from collections import namedtuple from contextlib import contextmanager from ..connector import ConnectorUnit, MetaConnectorUnit, ConnectorEnvironment from ..exception import MappingError, NoConnectorUnitError _logger = logging.getLogger(__name__) def mapping(func): """ Declare that a method is a mapping method. It is then used by the :py:class:`Mapper` to convert the records. Usage:: @mapping def any(self, record): return {'output_field': record['input_field']} """ func.is_mapping = True return func def changed_by(*args): """ Decorator for the mapping methods (:py:func:`mapping`) When fields are modified in OpenERP, we want to export only the modified fields. Using this decorator, we can specify which fields updates should trigger which mapping method. If ``changed_by`` is empty, the mapping is always active. As far as possible, this decorator should be used for the exports, thus, when we do an update on only a small number of fields on a record, the size of the output record will be limited to only the fields really having to be exported. Usage:: @changed_by('input_field') @mapping def any(self, record): return {'output_field': record['input_field']} :param *args: field names which trigger the mapping when modified """ def register_mapping(func): func.changed_by = args return func return register_mapping def only_create(func): """ Decorator for the mapping methods (:py:func:`mapping`) A mapping decorated with ``only_create`` means that it has to be used only for the creation of the records. Usage:: @only_create @mapping def any(self, record): return {'output_field': record['input_field']} """ func.only_create = True return func def none(field): """ A modifier intended to be used on the ``direct`` mappings. Replace the False-ish values by None. It can be used in a pipeline of modifiers when . Example:: direct = [(none('source'), 'target'), (none(m2o_to_backend('rel_id'), 'rel_id')] :param field: name of the source field in the record :param binding: True if the relation is a binding record """ def modifier(self, record, to_attr): if callable(field): result = field(self, record, to_attr) else: result = record[field] if not result: return None return result return modifier def convert(field, conv_type): """ A modifier intended to be used on the ``direct`` mappings. Convert a field's value to a given type. Example:: direct = [(convert('source', str), 'target')] :param field: name of the source field in the record :param binding: True if the relation is a binding record """ def modifier(self, record, to_attr): value = record[field] if not value: return False return conv_type(value) return modifier def m2o_to_backend(field, binding=None): """ A modifier intended to be used on the ``direct`` mappings. For a many2one, get the ID on the backend and returns it. When the field's relation is not a binding (i.e. it does not point to something like ``magento.*``), the binding model needs to be provided in the ``binding`` keyword argument. Example:: direct = [(m2o_to_backend('country_id', binding='magento.res.country'), 'country'), (m2o_to_backend('magento_country_id'), 'country')] :param field: name of the source field in the record :param binding: name of the binding model is the relation is not a binding """ def modifier(self, record, to_attr): if not record[field]: return False column = self.model._fields[field] if column.type != 'many2one': raise ValueError('The column %s should be a Many2one, got %s' % (field, type(column))) rel_id = record[field].id if binding is None: binding_model = column.comodel_name else: binding_model = binding binder = self.binder_for(binding_model) # if a relation is not a binding, we wrap the record in the # binding, we'll return the id of the binding wrap = bool(binding) value = binder.to_backend(rel_id, wrap=wrap) if not value: raise MappingError("Can not find an external id for record " "%s in model %s %s wrapping" % (rel_id, binding_model, 'with' if wrap else 'without')) return value return modifier def backend_to_m2o(field, binding=None, with_inactive=False): """ A modifier intended to be used on the ``direct`` mappings. For a field from a backend which is an ID, search the corresponding binding in OpenERP and returns its ID. When the field's relation is not a binding (i.e. it does not point to something like ``magento.*``), the binding model needs to be provided in the ``binding`` keyword argument. Example:: direct = [(backend_to_m2o('country', binding='magento.res.country'), 'country_id'), (backend_to_m2o('country'), 'magento_country_id')] :param field: name of the source field in the record :param binding: name of the binding model is the relation is not a binding :param with_inactive: include the inactive records in OpenERP in the search """ def modifier(self, record, to_attr): if not record[field]: return False column = self.model._fields[to_attr] if column.type != 'many2one': raise ValueError('The column %s should be a Many2one, got %s' % (to_attr, type(column))) rel_id = record[field] if binding is None: binding_model = column.comodel_name else: binding_model = binding binder = self.binder_for(binding_model) # if we want the ID of a normal record, not a binding, # we ask the unwrapped id to the binder unwrap = bool(binding) with self.session.change_context(active_test=False): value = binder.to_openerp(rel_id, unwrap=unwrap) if not value: raise MappingError("Can not find an existing %s for external " "record %s %s unwrapping" % (binding_model, rel_id, 'with' if unwrap else 'without')) return value return modifier MappingDefinition = namedtuple('MappingDefinition', ['changed_by', 'only_create']) class MetaMapper(MetaConnectorUnit): """ Metaclass for Mapper Build a ``_map_methods`` dict of mappings methods. The keys of the dict are the method names. The values of the dict are a namedtuple containing: """ def __new__(meta, name, bases, attrs): if attrs.get('_map_methods') is None: attrs['_map_methods'] = {} cls = super(MetaMapper, meta).__new__(meta, name, bases, attrs) # When a class has several bases: ``class Mapper(Base1, Base2):`` for base in bases: # Merge the _map_methods of the bases base_map_methods = getattr(base, '_map_methods', {}) for attr_name, definition in base_map_methods.iteritems(): if cls._map_methods.get(attr_name) is None: cls._map_methods[attr_name] = definition else: # Update the existing @changed_by with the content # of each base (it is mutated in place). # @only_create keeps the value defined in the first # base. mapping_changed_by = cls._map_methods[attr_name].changed_by mapping_changed_by.update(definition.changed_by) # Update the _map_methods from the @mapping methods in attrs, # respecting the class tree. for attr_name, attr in attrs.iteritems(): is_mapping = getattr(attr, 'is_mapping', None) if is_mapping: has_only_create = getattr(attr, 'only_create', False) mapping_changed_by = set(getattr(attr, 'changed_by', ())) # If already existing, it has been defined in a super # class, extend the @changed_by set if cls._map_methods.get(attr_name) is not None: definition = cls._map_methods[attr_name] mapping_changed_by.update(definition.changed_by) # keep the last choice for only_create definition = MappingDefinition(mapping_changed_by, has_only_create) cls._map_methods[attr_name] = definition return cls def __init__(cls, name, bases, attrs): """ Build a ``_changed_by_fields`` list of synchronized fields with mapper. It takes in account the ``direct`` fields and the fields declared in the decorator : ``changed_by``. """ changed_by_fields = set() if attrs.get('direct'): for from_attr, __ in attrs['direct']: attr_name = cls._direct_source_field_name(from_attr) changed_by_fields.add(attr_name) for method_name, method_def in attrs['_map_methods'].iteritems(): changed_by_fields |= method_def[0] for base in bases: if hasattr(base, '_changed_by_fields') and base._changed_by_fields: changed_by_fields |= base._changed_by_fields cls._changed_by_fields = changed_by_fields super(MetaMapper, cls).__init__(name, bases, attrs) @staticmethod def _direct_source_field_name(mapping_attr): """ Get the mapping field name. Goes through the function modifiers. Ex: [(none(convert(field_name, str)), out_field_name)] It assumes that the modifier has ``field`` as first argument like: def modifier(field, args): """ attr_name = mapping_attr if callable(mapping_attr): # Map the closure entries with variable names cells = dict(zip( mapping_attr.func_code.co_freevars, (c.cell_contents for c in mapping_attr.func_closure))) assert 'field' in cells, "Modifier without 'field' argument." if callable(cells['field']): attr_name = MetaMapper._direct_source_field_name( cells['field']) else: attr_name = cells['field'] return attr_name class MapChild(ConnectorUnit): """ MapChild is responsible to convert items. Items are sub-records of a main record. In this example, the items are the records in ``lines``:: sales = {'name': 'SO10', 'lines': [{'product_id': 1, 'quantity': 2}, {'product_id': 2, 'quantity': 2}]} A MapChild is always called from another :py:class:`Mapper` which provides a ``children`` configuration. Considering the example above, the "main" :py:class:`Mapper` would returns something as follows:: {'name': 'SO10', 'lines': [(0, 0, {'product_id': 11, 'quantity': 2}), (0, 0, {'product_id': 12, 'quantity': 2})]} A MapChild is responsible to: * Find the :py:class:`Mapper` to convert the items * Possibly filter out some lines (can be done by inheriting :py:meth:`skip_item`) * Convert the items' records using the found :py:class:`Mapper` * Format the output values to the format expected by OpenERP or the backend (as seen above with ``(0, 0, {values})`` A MapChild can be extended like any other :py:class:`~connector.connector.ConnectorUnit`. However, it is not mandatory to explicitly create a MapChild for each children mapping, the default one will be used (:py:class:`ImportMapChild` or :py:class:`ExportMapChild`). The implementation by default does not take care of the updates: if I import a sales order 2 times, the lines will be duplicated. This is not a problem as long as an importation should only support the creation (typical for sales orders). It can be implemented on a case-by-case basis by inheriting :py:meth:`get_item_values` and :py:meth:`format_items`. """ _model_name = None def _child_mapper(self): raise NotImplementedError def skip_item(self, map_record): """ Hook to implement in sub-classes when some child records should be skipped. The parent record is accessible in ``map_record``. If it returns True, the current child record is skipped. :param map_record: record that we are converting :type map_record: :py:class:`MapRecord` """ return False def get_items(self, items, parent, to_attr, options): """ Returns the formatted output values of items from a main record :param items: list of item records :type items: list :param parent: parent record :param to_attr: destination field (can be used for introspecting the relation) :type to_attr: str :param options: dict of options, herited from the main mapper :return: formatted output values for the item """ mapper = self._child_mapper() mapped = [] for item in items: map_record = mapper.map_record(item, parent=parent) if self.skip_item(map_record): continue mapped.append(self.get_item_values(map_record, to_attr, options)) return self.format_items(mapped) def get_item_values(self, map_record, to_attr, options): """ Get the raw values from the child Mappers for the items. It can be overridden for instance to: * Change options * Use a :py:class:`~connector.connector.Binder` to know if an item already exists to modify an existing item, rather than to add it :param map_record: record that we are converting :type map_record: :py:class:`MapRecord` :param to_attr: destination field (can be used for introspecting the relation) :type to_attr: str :param options: dict of options, herited from the main mapper """ return map_record.values(**options) def format_items(self, items_values): """ Format the values of the items mapped from the child Mappers. It can be overridden for instance to add the OpenERP relationships commands ``(6, 0, [IDs])``, ... As instance, it can be modified to handle update of existing items: check if an 'id' has been defined by :py:meth:`get_item_values` then use the ``(1, ID, {values}``) command :param items_values: mapped values for the items :type items_values: list """ return items_values class ImportMapChild(MapChild): """ :py:class:`MapChild` for the Imports """ def _child_mapper(self): return self.unit_for(ImportMapper) def format_items(self, items_values): """ Format the values of the items mapped from the child Mappers. It can be overridden for instance to add the OpenERP relationships commands ``(6, 0, [IDs])``, ... As instance, it can be modified to handle update of existing items: check if an 'id' has been defined by :py:meth:`get_item_values` then use the ``(1, ID, {values}``) command :param items_values: list of values for the items to create :type items_values: list """ return [(0, 0, values) for values in items_values] class ExportMapChild(MapChild): """ :py:class:`MapChild` for the Exports """ def _child_mapper(self): return self.unit_for(ExportMapper) class Mapper(ConnectorUnit): """ A Mapper translates an external record to an OpenERP record and conversely. The output of a Mapper is a ``dict``. 3 types of mappings are supported: Direct Mappings Example:: direct = [('source', 'target')] Here, the ``source`` field will be copied in the ``target`` field. A modifier can be used in the source item. The modifier will be applied to the source field before being copied in the target field. It should be a closure function respecting this idiom:: def a_function(field): ''' ``field`` is the name of the source field. Naming the arg: ``field`` is required for the conversion''' def modifier(self, record, to_attr): ''' self is the current Mapper, record is the current record to map, to_attr is the target field''' return record[field] return modifier And used like that:: direct = [ (a_function('source'), 'target'), ] A more concrete example of modifier:: def convert(field, conv_type): ''' Convert the source field to a defined ``conv_type`` (ex. str) before returning it''' def modifier(self, record, to_attr): value = record[field] if not value: return None return conv_type(value) return modifier And used like that:: direct = [ (convert('myfield', float), 'target_field'), ] More examples of modifiers: * :py:func:`convert` * :py:func:`m2o_to_backend` * :py:func:`backend_to_m2o` Method Mappings A mapping method allows to execute arbitrary code and return one or many fields:: @mapping def compute_state(self, record): # compute some state, using the ``record`` or not state = 'pending' return {'state': state} We can also specify that a mapping methods should be applied only when an object is created, and never applied on further updates:: @only_create @mapping def default_warehouse(self, record): # get default warehouse warehouse_id = ... return {'warehouse_id': warehouse_id} Submappings When a record contains sub-items, like the lines of a sales order, we can convert the children using another Mapper:: children = [('items', 'line_ids', 'model.name')] It allows to create the sales order and all its lines with the same call to :py:meth:`openerp.models.BaseModel.create()`. When using ``children`` for items of a record, we need to create a :py:class:`Mapper` for the model of the items, and optionally a :py:class:`MapChild`. Usage of a Mapper:: mapper = Mapper(env) map_record = mapper.map_record(record) values = map_record.values() values = map_record.values(only_create=True) values = map_record.values(fields=['name', 'street']) """ __metaclass__ = MetaMapper # name of the OpenERP model, to be defined in concrete classes _model_name = None direct = [] # direct conversion of a field to another (from_attr, to_attr) children = [] # conversion of sub-records (from_attr, to_attr, model) _map_methods = None _map_child_class = None def __init__(self, connector_env): """ :param connector_env: current environment (backend, session, ...) :type connector_env: :py:class:`connector.connector.Environment` """ super(Mapper, self).__init__(connector_env) self._options = None def _map_direct(self, record, from_attr, to_attr): """ Apply the ``direct`` mappings. :param record: record to convert from a source to a target :param from_attr: name of the source attribute or a callable :type from_attr: callable | str :param to_attr: name of the target attribute :type to_attr: str """ raise NotImplementedError def _map_children(self, record, attr, model): raise NotImplementedError @property def map_methods(self): """ Yield all the methods decorated with ``@mapping`` """ for meth, definition in self._map_methods.iteritems(): yield getattr(self, meth), definition def _get_map_child_unit(self, model_name): try: mapper_child = self.unit_for(self._map_child_class, model=model_name) except NoConnectorUnitError: # does not force developers to use a MapChild -> # will use the default one if not explicitely defined env = ConnectorEnvironment(self.backend_record, self.session, model_name) mapper_child = self._map_child_class(env) return mapper_child def _map_child(self, map_record, from_attr, to_attr, model_name): """ Convert items of the record as defined by children """ assert self._map_child_class is not None, "_map_child_class required" child_records = map_record.source[from_attr] mapper_child = self._get_map_child_unit(model_name) items = mapper_child.get_items(child_records, map_record, to_attr, options=self.options) return items @contextmanager def _mapping_options(self, options): """ Change the mapping options for the Mapper. Context Manager to use in order to alter the behavior of the mapping, when using ``_apply`` or ``finalize``. """ current = self._options self._options = options yield self._options = current @property def options(self): """ Options can be accessed in the mapping methods with ``self.options``. """ return self._options def map_record(self, record, parent=None): """ Get a :py:class:`MapRecord` with record, ready to be converted using the current Mapper. :param record: record to transform :param parent: optional parent record, for items """ return MapRecord(self, record, parent=parent) def _apply(self, map_record, options=None): """ Apply the mappings on a :py:class:`MapRecord` :param map_record: source record to convert :type map_record: :py:class:`MapRecord` """ if options is None: options = {} with self._mapping_options(options): return self._apply_with_options(map_record) def _apply_with_options(self, map_record): """ Apply the mappings on a :py:class:`MapRecord` with contextual options (the ``options`` given in :py:meth:`MapRecord.values()` are accessible in ``self.options``) :param map_record: source record to convert :type map_record: :py:class:`MapRecord` """ assert self.options is not None, ( "options should be defined with '_mapping_options'") _logger.debug('converting record %s to model %s', map_record.source, self.model) fields = self.options.fields for_create = self.options.for_create result = {} for from_attr, to_attr in self.direct: if callable(from_attr): attr_name = MetaMapper._direct_source_field_name(from_attr) else: attr_name = from_attr if (not fields or attr_name in fields): value = self._map_direct(map_record.source, from_attr, to_attr) result[to_attr] = value for meth, definition in self.map_methods: mapping_changed_by = definition.changed_by if (not fields or not mapping_changed_by or mapping_changed_by.intersection(fields)): if definition.only_create and not for_create: continue values = meth(map_record.source) if not values: continue if not isinstance(values, dict): raise ValueError('%s: invalid return value for the ' 'mapping method %s' % (values, meth)) result.update(values) for from_attr, to_attr, model_name in self.children: if (not fields or from_attr in fields): result[to_attr] = self._map_child(map_record, from_attr, to_attr, model_name) return self.finalize(map_record, result) def finalize(self, map_record, values): """ Called at the end of the mapping. Can be used to modify the values before returning them, as the ``on_change``. :param map_record: source map_record :type map_record: :py:class:`MapRecord` :param values: mapped values :returns: mapped values :rtype: dict """ return values class ImportMapper(Mapper): """ :py:class:`Mapper` for imports. Transform a record from a backend to an OpenERP record """ _map_child_class = ImportMapChild def _map_direct(self, record, from_attr, to_attr): """ Apply the ``direct`` mappings. :param record: record to convert from a source to a target :param from_attr: name of the source attribute or a callable :type from_attr: callable | str :param to_attr: name of the target attribute :type to_attr: str """ if callable(from_attr): return from_attr(self, record, to_attr) value = record.get(from_attr) if not value: return False # Backward compatibility: when a field is a relation, and a modifier is # not used, we assume that the relation model is a binding. # Use an explicit modifier backend_to_m2o in the 'direct' mappings to # change that. field = self.model._fields[to_attr] if field.type == 'many2one': mapping_func = backend_to_m2o(from_attr) value = mapping_func(self, record, to_attr) return value class ExportMapper(Mapper): """ :py:class:`Mapper` for exports. Transform a record from OpenERP to a backend record """ _map_child_class = ExportMapChild def _map_direct(self, record, from_attr, to_attr): """ Apply the ``direct`` mappings. :param record: record to convert from a source to a target :param from_attr: name of the source attribute or a callable :type from_attr: callable | str :param to_attr: name of the target attribute :type to_attr: str """ if callable(from_attr): return from_attr(self, record, to_attr) value = record[from_attr] if not value: return False # Backward compatibility: when a field is a relation, and a modifier is # not used, we assume that the relation model is a binding. # Use an explicit modifier m2o_to_backend in the 'direct' mappings to # change that. field = self.model._fields[from_attr] if field.type == 'many2one': mapping_func = m2o_to_backend(from_attr) value = mapping_func(self, record, to_attr) return value class MapRecord(object): """ A record prepared to be converted using a :py:class:`Mapper`. MapRecord instances are prepared by :py:meth:`Mapper.map_record`. Usage:: mapper = SomeMapper(env) map_record = mapper.map_record(record) output_values = map_record.values() See :py:meth:`values` for more information on the available arguments. """ def __init__(self, mapper, source, parent=None): self._source = source self._mapper = mapper self._parent = parent self._forced_values = {} @property def source(self): """ Source record to be converted """ return self._source @property def parent(self): """ Parent record if the current record is an item """ return self._parent def values(self, for_create=None, fields=None, **kwargs): """ Build and returns the mapped values according to the options. Usage:: mapper = SomeMapper(env) map_record = mapper.map_record(record) output_values = map_record.values() Creation of records When using the option ``for_create``, only the mappings decorated with ``@only_create`` will be mapped. :: output_values = map_record.values(for_create=True) Filter on fields When using the ``fields`` argument, the mappings will be filtered using either the source key in ``direct`` arguments, either the ``changed_by`` arguments for the mapping methods. :: output_values = map_record.values(fields=['name', 'street']) Custom options Arbitrary key and values can be defined in the ``kwargs`` arguments. They can later be used in the mapping methods using ``self.options``. :: output_values = map_record.values(tax_include=True) :param for_create: specify if only the mappings for creation (``@only_create``) should be mapped. :type for_create: boolean :param fields: filter on fields :type fields: list :param **kwargs: custom options, they can later be used in the mapping methods """ options = MapOptions(for_create=for_create, fields=fields, **kwargs) values = self._mapper._apply(self, options=options) values.update(self._forced_values) return values def update(self, *args, **kwargs): """ Force values to be applied after a mapping. Usage:: mapper = SomeMapper(env) map_record = mapper.map_record(record) map_record.update(a=1) output_values = map_record.values() # output_values will at least contain {'a': 1} The values assigned with ``update()`` are in any case applied, they have a greater priority than the mapping values. """ self._forced_values.update(*args, **kwargs) class MapOptions(dict): """ Container for the options of mappings. Options can be accessed using attributes of the instance. When an option is accessed and does not exist, it returns None. """ def __getitem__(self, key): try: return super(MapOptions, self).__getitem__(key) except KeyError: return None def __getattr__(self, key): return self[key] def __setattr__(self, key, value): self[key] = value
from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('submission', '0011_auto_20170921_0937'), ('identifiers', '0003_brokendoi_journal'), ] operations = [ migrations.RemoveField( model_name='brokendoi', name='journal', ), migrations.AddField( model_name='brokendoi', name='article', field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='submission.Article'), preserve_default=False, ), ]
import json, web from lib.log import Log class Env(object): @staticmethod def get(key): if key and key in web.ctx.env: return web.ctx.env[key] else: return web.ctx.env @staticmethod def set(key, value): web.ctx.env[key] = value @staticmethod def setFromFile(file): fenv = open(file) jenv = json.load(fenv) for key,value in jenv.items(): web.ctx.env[key] = value
from comics.aggregator.crawler import CrawlerBase from comics.core.comic_data import ComicDataBase class ComicData(ComicDataBase): name = 'Billy' language = 'no' url = 'http://www.billy.no/' start_date = '1950-01-01' active = False rights = 'Mort Walker' class Crawler(CrawlerBase): def crawl(self, pub_date): pass # Comic no longer published
from setuptools import setup, find_packages XMODULES = [ "abtest = xmodule.abtest_module:ABTestDescriptor", "book = xmodule.backcompat_module:TranslateCustomTagDescriptor", "chapter = xmodule.seq_module:SequenceDescriptor", "combinedopenended = xmodule.combined_open_ended_module:CombinedOpenEndedDescriptor", "conditional = xmodule.conditional_module:ConditionalDescriptor", "course = xmodule.course_module:CourseDescriptor", "customtag = xmodule.template_module:CustomTagDescriptor", "discuss = xmodule.backcompat_module:TranslateCustomTagDescriptor", "html = xmodule.html_module:HtmlDescriptor", "image = xmodule.backcompat_module:TranslateCustomTagDescriptor", "error = xmodule.error_module:ErrorDescriptor", "peergrading = xmodule.peer_grading_module:PeerGradingDescriptor", "poll_question = xmodule.poll_module:PollDescriptor", "problem = xmodule.capa_module:CapaDescriptor", "problemset = xmodule.seq_module:SequenceDescriptor", "randomize = xmodule.randomize_module:RandomizeDescriptor", "section = xmodule.backcompat_module:SemanticSectionDescriptor", "sequential = xmodule.seq_module:SequenceDescriptor", "slides = xmodule.backcompat_module:TranslateCustomTagDescriptor", "vertical = xmodule.vertical_module:VerticalDescriptor", "video = xmodule.video_module:VideoDescriptor", "videoalpha = xmodule.video_module:VideoDescriptor", "videodev = xmodule.backcompat_module:TranslateCustomTagDescriptor", "videosequence = xmodule.seq_module:SequenceDescriptor", "discussion = xmodule.discussion_module:DiscussionDescriptor", "course_info = xmodule.html_module:CourseInfoDescriptor", "static_tab = xmodule.html_module:StaticTabDescriptor", "custom_tag_template = xmodule.raw_module:RawDescriptor", "about = xmodule.html_module:AboutDescriptor", "wrapper = xmodule.wrapper_module:WrapperDescriptor", "graphical_slider_tool = xmodule.gst_module:GraphicalSliderToolDescriptor", "annotatable = xmodule.annotatable_module:AnnotatableDescriptor", "textannotation = xmodule.textannotation_module:TextAnnotationDescriptor", "videoannotation = xmodule.videoannotation_module:VideoAnnotationDescriptor", "foldit = xmodule.foldit_module:FolditDescriptor", "word_cloud = xmodule.word_cloud_module:WordCloudDescriptor", "hidden = xmodule.hidden_module:HiddenDescriptor", "raw = xmodule.raw_module:RawDescriptor", "crowdsource_hinter = xmodule.crowdsource_hinter:CrowdsourceHinterDescriptor", "lti = xmodule.lti_module:LTIDescriptor", ] setup( name="XModule", version="0.1", packages=find_packages(exclude=["tests"]), install_requires=[ 'distribute', 'docopt', 'capa', 'path.py', 'webob', ], package_data={ 'xmodule': ['js/module/*'], }, # See http://guide.python-distribute.org/creation.html#entry-points # for a description of entry_points entry_points={ 'xblock.v1': XMODULES, 'xmodule.v1': XMODULES, 'console_scripts': [ 'xmodule_assets = xmodule.static_content:main', ], }, )
import sys from compmusic.extractors.imagelib.MelSpectrogramImage import create_wave_images from processing import AudioProcessingException ''' parser = optparse.OptionParser("usage: %prog [options] input-filename", conflict_handler="resolve") parser.add_option("-a", "--waveout", action="store", dest="output_filename_w", type="string", help="output waveform image (default input filename + _w.png)") parser.add_option("-s", "--specout", action="store", dest="output_filename_s", type="string", help="output spectrogram image (default input filename + _s.jpg)") parser.add_option("-w", "--width", action="store", dest="image_width", type="int", help="image width in pixels (default %default)") parser.add_option("-h", "--height", action="store", dest="image_height", type="int", help="image height in pixels (default %default)") parser.add_option("-f", "--fft", action="store", dest="fft_size", type="int", help="fft size, power of 2 for increased performance (default %default)") parser.add_option("-p", "--profile", action="store_true", dest="profile", help="run profiler and output profiling information") parser.set_defaults(output_filename_w=None, output_filename_s=None, image_width=500, image_height=171, fft_size=2048) (options, args) = parser.parse_args() if len(args) == 0: parser.print_help() parser.error("not enough arguments") if len(args) > 1 and (options.output_filename_w != None or options.output_filename_s != None): parser.error("when processing multiple files you can't define the output filename!") ''' def progress_callback(percentage): sys.stdout.write(str(percentage) + "% ") sys.stdout.flush() # process all files so the user can use wildcards like *.wav def genimages(input_file, output_file_w, output_file_s, output_file_m, options): args = (input_file, output_file_w, output_file_s, output_file_m, options.image_width, options.image_height, options.fft_size, progress_callback, options.f_min, options.f_max, options.scale_exp, options.pallete) print("processing file %s:\n\t" % input_file, end="") try: create_wave_images(*args) except AudioProcessingException as e: print("Error running wav2png: ", e)